# Record the R version, platform, locale, and loaded package versions
# so the rendered report is reproducible.
sessionInfo()
## R version 3.5.2 (2018-12-20)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17763)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.2  magrittr_1.5    tools_3.5.2     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.21      stringr_1.3.1   xfun_0.4        digest_0.6.18  
## [13] evaluate_0.12

User Inputs

# ---- User inputs ----
# Copy the R Markdown report parameters (the `params` list) into
# top-level variables.  Per the str(params) output later in this
# document: output.var is the response column name (chr); all other
# entries are logical switches.
output.var <- params$output.var        # name of the response variable to model
transform.abs <- params$transform.abs  # apply an abs() transform? -- TODO confirm semantics
log.pred <- params$log.pred            # predict on the log scale?
norm.pred <- params$norm.pred          # predict on the normalized scale?
eda <- params$eda                      # run exploratory data analysis?

# Per-algorithm switches (manual implementations)
algo.forward <- params$algo.forward
algo.backward <- params$algo.backward
algo.stepwise <- params$algo.stepwise
algo.LASSO <- params$algo.LASSO
algo.LARS <- params$algo.LARS

# Per-algorithm switches (presumably caret-based variants -- TODO confirm)
algo.forward.caret <- params$algo.forward.caret
algo.backward.caret <- params$algo.backward.caret
algo.stepwise.caret <- params$algo.stepwise.caret
algo.LASSO.caret <- params$algo.LASSO.caret
algo.LARS.caret <- params$algo.LARS.caret

# Echo the run configuration into the rendered report for traceability.
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
# str() prints the name, type, and value of every report parameter.
str(params)
## List of 15
##  $ output.var         : chr "y3"
##  $ transform.abs      : logi FALSE
##  $ log.pred           : logi FALSE
##  $ norm.pred          : logi TRUE
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# ---- Setup labels ----
# label.names          = name of the response column actually modeled
# alt.scale.label.name = the same response on the alternate scale:
#   - if predicting on the log scale, the alternate scale is the normal scale
#   - if predicting on the normal (or normalized) scale, the alternate is log
#
# Precedence matches the original three independent ifs: norm.pred wins
# over log.pred; when both are FALSE the raw output variable is used.
# isTRUE() also guards against NA/NULL flags, which would have errored
# under the original `== TRUE` comparisons.
if (isTRUE(norm.pred)) {
  label.names <- paste0("norm.", output.var)
  alt.scale.label.name <- output.var
} else if (isTRUE(log.pred)) {
  label.names <- paste0("log.", output.var)
  alt.scale.label.name <- output.var
} else {
  label.names <- output.var
  alt.scale.label.name <- paste0("log.", output.var)
}

Prepare Data

Read and Clean Features

# ---- Read features ----
# Load the feature matrix in two exports: the standard file and a
# high-precision variant.  Paths are relative to this document's
# location -- TODO confirm the ../../Data layout.
features <- read.csv("../../Data/features.csv")
features.highprec <- read.csv("../../Data/features_highprec.csv")

# Sanity check: all.equal() reports, per column, the mean relative
# difference between the two exports (printed below).  Small relative
# differences indicate the files differ only in rounding precision,
# not in content.
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Preview the first rows of the standard-precision feature matrix.
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Preview the first rows of the high-precision export; note that columns
# such as x11 carry more significant digits than in the standard file.
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Use the high-precision feature set for all downstream modeling.
features <- features.highprec
# str(features)

Checking correlations to evaluate removal of redundant features

# Pairwise correlations across all numeric feature columns, rounded for display.
corr.matrix <- round(cor(features[sapply(features, is.numeric)]), 2)

# Keep only the rows/columns that show at least one strong correlation.
threshold <- 0.6
corr.matrix.tmp <- corr.matrix
diag(corr.matrix.tmp) <- 0  # ignore self-correlation when screening
high.corr <- apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix <- corr.matrix.tmp[high.corr, high.corr]

DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# All feature column names except the job identifier.
feature.names <- colnames(features)
drops <- c('JobName')
feature.names <- feature.names[!feature.names %in% drops]
# str(feature.names)

Read and Clean Labels

# Load labels and keep only the job id plus the selected output variable.
labels <- read.csv("../../Data/labels.csv")
# str(labels)
labels <- labels[, c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.21  
##  Job_00003:   1   Median :123.99  
##  Job_00004:   1   Mean   :125.36  
##  Job_00005:   1   3rd Qu.:131.06  
##  Job_00006:   1   Max.   :193.73  
##  (Other)  :9994   NA's   :2497

Merge Datasets

# Join features to labels on the job id, then drop the id column.
data <- merge(features, labels, by = 'JobName')
drops <- c('JobName')
data <- data[, !colnames(data) %in% drops]
# str(data)

Transformations

# Optionally convert the label from dB to absolute scale (10^(x/20)) and
# drop extreme rows.
# NOTE(review): the filter threshold is hard-coded to column y3, while the
# label is otherwise parameterized via output.var / label.names — confirm
# this still behaves correctly when output.var != "y3".
if (transform.abs == TRUE){
  data[,label.names] = 10^(data[,label.names]/20)
  data = filter(data, y3 < 1E7)
}


#str(data)
# Optionally replace the label with its log10 transform.
# (label.names and alt.scale.label.name are assumed to be defined earlier
# in the document — TODO confirm.)
if (log.pred == TRUE){
  data[label.names] = log(data[alt.scale.label.name],10)

  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
# Optionally replace the label with its bestNormalize() transform
# (standardized; chosen automatically among candidate normalizations).
if (norm.pred){
  t=bestNormalize::bestNormalize(data[[alt.scale.label.name]])
  data[label.names] = predict(t)

  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
## Warning in orderNorm(standardize = TRUE, warn = TRUE, x = c(121.2556129, : Ties in data, Normal distribution not guaranteed
#str(data)

Remove NA Cases

data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of Label with Features

# When EDA is enabled, tabulate each feature's correlation with the label.
if (eda == TRUE){
  feature.cols <- dplyr::select(data, -one_of(label.names))
  label.cols <- dplyr::select_at(data, label.names)
  corr.to.label <- round(cor(feature.cols, label.cols), 4)
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

# When EDA is enabled, rank the features by variance inflation factor.
if (eda == TRUE){
  vifDF <- arrange(usdm::vif(select_at(data, feature.names)), desc(VIF))
  head(vifDF, 10)
}

Scatterplots

# Panel function for pairs(): draws a scaled histogram of x in the current
# diagonal panel, restoring the user coordinate system on exit.
panel.hist <- function(x, ...) {
  old.usr <- par("usr")
  on.exit(par(old.usr))
  par(usr = c(old.usr[1:2], 0, 1.5))
  histo <- hist(x, plot = FALSE)
  edges <- histo$breaks
  heights <- histo$counts / max(histo$counts)
  rect(edges[-length(edges)], 0, edges[-1], heights, col = "cyan", ...)
}
# When EDA is enabled, show the distribution of the (transformed) label.
if (eda == TRUE){
  histogram(data[ ,label.names])
  #hist(data[complete.cases(data),alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# Scatterplot of yvar against each x variable, one chart per feature.
#
# data  - data.frame holding all the columns
# xvars - character vector of predictor columns; defaults to every column
#         except yvar
# yvar  - name of the response column
#
# Called for its plotting side effect; returns NULL invisibly.
ind.pairs.plot <- function(data, xvars = NULL, yvar)
{
    if (is.null(xvars)) {
        xvars <- names(data)[names(data) != yvar]
    }

    # seq_along() (rather than 1:length(xvars)) avoids a bogus 1:0 loop,
    # and hence a plot() error, when xvars is empty.
    for (i in seq_along(xvars)) {
        plot(data[, xvars[i]], data[, yvar], xlab = xvars[i], ylab = yvar)
    }
    invisible(NULL)
}

# When EDA is enabled, plot the label against every feature individually.
if (eda == TRUE){
  ind.pairs.plot(data, feature.names, label.names)
}

# Scratch ggplot version kept for reference:
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

# Feature engineering: replace x18 with sqrt(x18) based on the scatter plots.
# NOTE(review): this runs only when eda == FALSE — presumably transformation
# is skipped during EDA-only renders so plots show raw features; confirm.
if(eda ==FALSE){
  # x18 may need transformations
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')
  
  # transforming x18: add sqrt.x18 and drop the raw column
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))
  
  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# Shuffle the rows, then split 80/20 into training and test sets
# (sample.split stratifies on the label column).
data <- data[sample(nrow(data)), ]
split <- sample.split(data[, label.names], SplitRatio = 0.8)

data.train <- subset(data, split == TRUE)
data.test <- subset(data, split == FALSE)

Common Functions

# Standard regression diagnostics for a fitted lm-style model:
#   - base plot(model) diagnostic panels
#   - studentized and standardized residuals vs. predicted values
#   - histogram of studentized residuals with a standard-normal overlay
#   - leverage and Cook's distance plots, with 4/n and 1 cutoff lines
# Prints counts of points exceeding the Cook's D cutoffs and returns the
# vector of Cook's distances.
#
# model - fitted model (lm-compatible: resid/rstandard/rstudent/predict work)
# train - data used to fit the model (for predictions and the 4/n cutoff)
plot.diagnostics <-  function(model, train) {
  plot(model)
  
  residuals = resid(model) # Plotted above in plot(lm.out)
  r.standard = rstandard(model)
  r.student = rstudent(model)

  # Studentized residuals vs. fitted values
  plot(predict(model,train),r.student,
      ylab="Student Residuals", xlab="Predicted Values", 
      main="Student Residual Plot") 
  abline(0, 0)
  
  # Standardized residuals vs. fitted values, with +/-2 reference lines
  plot(predict(model, train),r.standard,
      ylab="Standard Residuals", xlab="Predicted Values", 
      main="Standard Residual Plot") 
  abline(0, 0)
  abline(2, 0)
  abline(-2, 0)
  
  # Histogram
  hist(r.student, freq=FALSE, main="Distribution of Studentized Residuals", 
  xlab="Studentized Residuals", ylab="Density", ylim=c(0,0.5))

  # Create range of x-values for normal curve
  xfit <- seq(min(r.student)-1, max(r.student)+1, length=40)

  # Generate values from the normal distribution at the specified values
  yfit <- (dnorm(xfit))

  # Add the normal curve
  lines(xfit, yfit, ylim=c(0,0.5))
  
  
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influential plots
  inf.meas = influence.measures(model)
  # print (summary(inf.meas)) # too much data
  
  # Leverage plot (diagonal of the hat matrix)
  lev = hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  
  # Cook's Distance, with the common 4/n and 1 cutoffs drawn as lines
  cd = cooks.distance(model)
  plot(cd,ylab="Cooks distances")
  abline(4/nrow(train),0)
  abline(1,0)
  
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = "")) 
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = "")) 
  return(cd)
}

# Train a feature-selecting regression via caret (in parallel) and produce
# diagnostic plots for the chosen method.
#
# formula       - full model formula (label ~ all features)
# data          - training data.frame
# method        - caret method: 'leapForward', 'leapBackward', 'leapSeq',
#                 'glmnet' (with subopt = 'LASSO'), or 'lars'
# subopt        - sub-option qualifying `method`; only 'LASSO' is handled
# feature.names - feature names; sizes the nvmax grid for the leap methods
# train.control - optional caret::trainControl; defaults to 10-fold CV grid search
# tune.grid     - optional tuning grid; a method-specific default is built
# pre.proc      - optional preProc spec; forced to center/scale for 'lars'
#
# Returns a list(model, id, residPlot, residHistogram or metricsPlot) from
# the branch matching `method`; NULL if no branch matches.
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  
  # Default resampling scheme: 10-fold cross-validation with a grid search.
  if(is.null(train.control)){
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }
  
  # Build a method-specific tuning grid when the caller did not supply one.
  # NOTE(review): these scalar conditions use the vectorized `|` where `||`
  # would be idiomatic, and `subopt == 'LASSO'` errors when subopt is NULL
  # with method == 'glmnet' — confirm callers always pass subopt there.
  if(is.null(tune.grid)){
    if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    if (method == 'glmnet' && subopt == 'LASSO'){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1)  # alpha = 1 selects the pure LASSO penalty in glmnet
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }
  
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  cl <- makeCluster(detectCores()*0.75) # use 75% of cores only, leave rest for other tasks
  registerDoParallel(cl)

  set.seed(1) 
  # note that the seed has to actually be set just before this function is called
  # setting it above does not ensure reproducibility for some reason
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )
  
  stopCluster(cl)
  registerDoSEQ() # register sequential engine in case you are not using this function anymore
  
  # --- Branch 1: leaps-based subset selection ---
  if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    print(model.caret$results) # all model results
    print(model.caret$bestTune) # best model
  
    model = model.caret$finalModel

    # Residuals Plot  MMORO #
    # the leap functions do not support studentized residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
   
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    
    # Provides the coefficients of the best model
    id = rownames(model.caret$bestTune)
    message("Coefficients of final model:")
    print (coef(model, id = id))
    # Need to find alternate to plotting diagnostic plots
    # plot.diagnostics(model.forward,data.train)
    # plot(model.forward,labels = colnames(data.train),scale=c("bic")) ## too many variables
    return(list(model = model,id = id,residPlot = residPlot ,residHistogram=residHistogram))
  }
  # --- Branch 2: glmnet LASSO ---
  if (method == 'glmnet' && subopt == 'LASSO'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Metrics Plot MMORO#
    # Reshape caret's results so MAE/RMSE/Rsquared can be faceted together.
    dataPlot = cbind(model.caret$results, id=as.numeric(rownames(model.caret$results))) %>%
      gather(key='metric',value='value',-id) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=id,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot MMORO#
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
    
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)

    # NOTE(review): residHistogram is drawn but not included in the returned
    # list here (the other branches return it) — confirm that is intended.
        id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id,residPlot = residPlot,metricsPlot=metricsPlot ))
  }
  # --- Branch 3: least angle regression ---
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Residuals Plot  MMORO#
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)

    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id,residPlot = residPlot ,residHistogram=residHistogram))
  }
}

# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# Changed slightly: object$call[[2]] was just returning the symbol "formula"
# without the actual value, so the formula is passed in explicitly instead.
# Predicts from a regsubsets fit using the coefficients of sub-model `id`.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    # model.matrix() adds the intercept and expands any interaction terms
    design.mat <- model.matrix(formula, newdata)
    best.coefs <- coef(object, id = id)
    design.mat[, names(best.coefs)] %*% best.coefs
}
  
# Evaluate a fitted model on a hold-out set: prints a summary of the
# predictions and the test MSE, and draws an actual-vs-predicted scatter
# plot with +/- `good` and +/- `ok` relative tolerance lines.
#
# model         - fitted model object
# test          - hold-out data.frame
# level         - confidence level for plain predict() (method = NULL case)
# draw.limits   - NOTE(review): currently unused; tolerance lines are always
#                 drawn (kept for interface compatibility)
# good, ok      - relative tolerance bands for the scatter plot
# method        - NULL for a plain lm-style model, or the caret method name:
#                 'leapForward', 'leapBackward', 'leapSeq', 'glmnet', 'lars'
# subopt        - sub-option qualifying `method` (e.g. 'LASSO' for glmnet)
# id            - best-model id for the regsubsets-based methods
# formula       - full formula (subset methods pick their own variables)
# feature.names - feature column names (builds the glmnet input matrix)
# label.names   - name of the label column in `test`
#
# Returns the test MSE, invisibly.
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names){
  ## if using caret for glm select equivalent functionality,
  ## need to set regsubset = TRUE, pass id of best model through id variable,
  ## and pass formula (full is ok as it will select subset of variables from there)

  # BUG FIX: the original fell through to `if (method == ...)` even when
  # method was NULL; `NULL == 'x'` yields logical(0), which makes `if`
  # error with "argument is of length zero". Chain the branches instead.
  # The glmnet branch also guards against subopt being NULL now.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level)
  } else if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && !is.null(subopt) && subopt == 'LASSO'){
    xtest = as.matrix(test[,feature.names])
    pred=as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred=as.data.frame(predict(model, newdata = test))
  }

  # Summary of predicted values (first column is the point prediction)
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))

  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))

  # Actual vs. predicted, with green (+/- good) and blue (+/- ok) bands
  plot(test[,label.names],pred[,1],xlab = "Actual", ylab = "Predicted")
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)

  invisible(test.mse)
}

Setup Formulae

# Build "label ~ all other columns" and intercept-only (grand mean) formulas
# from the training-frame column names; label column(s) come from label.names.
n <- names(data.train)
formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~", paste(n[!n %in% label.names], collapse = " + "))) 
grand.mean.formula = as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~ 1"))
print(formula)
## norm.y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + 
##     x11 + x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + 
##     x22 + x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + 
##     stat7 + stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + 
##     stat14 + stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + 
##     stat21 + stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + 
##     stat28 + stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + 
##     stat35 + stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + 
##     stat42 + stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + 
##     stat49 + stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + 
##     stat56 + stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + 
##     stat63 + stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + 
##     stat70 + stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + 
##     stat77 + stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + 
##     stat84 + stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + 
##     stat91 + stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + 
##     stat98 + stat99 + stat100 + stat101 + stat102 + stat103 + 
##     stat104 + stat105 + stat106 + stat107 + stat108 + stat109 + 
##     stat110 + stat111 + stat112 + stat113 + stat114 + stat115 + 
##     stat116 + stat117 + stat118 + stat119 + stat120 + stat121 + 
##     stat122 + stat123 + stat124 + stat125 + stat126 + stat127 + 
##     stat128 + stat129 + stat130 + stat131 + stat132 + stat133 + 
##     stat134 + stat135 + stat136 + stat137 + stat138 + stat139 + 
##     stat140 + stat141 + stat142 + stat143 + stat144 + stat145 + 
##     stat146 + stat147 + stat148 + stat149 + stat150 + stat151 + 
##     stat152 + stat153 + stat154 + stat155 + stat156 + stat157 + 
##     stat158 + stat159 + stat160 + stat161 + stat162 + stat163 + 
##     stat164 + stat165 + stat166 + stat167 + stat168 + stat169 + 
##     stat170 + stat171 + stat172 + stat173 + stat174 + stat175 + 
##     stat176 + stat177 + stat178 + stat179 + stat180 + stat181 + 
##     stat182 + stat183 + stat184 + stat185 + stat186 + stat187 + 
##     stat188 + stat189 + stat190 + stat191 + stat192 + stat193 + 
##     stat194 + stat195 + stat196 + stat197 + stat198 + stat199 + 
##     stat200 + stat201 + stat202 + stat203 + stat204 + stat205 + 
##     stat206 + stat207 + stat208 + stat209 + stat210 + stat211 + 
##     stat212 + stat213 + stat214 + stat215 + stat216 + stat217 + 
##     sqrt.x18
print(grand.mean.formula)
## norm.y3 ~ 1
# Update feature.names because we may have transformed some features
# (e.g. x18 was replaced by sqrt.x18 during feature engineering above).
feature.names = n[!n %in% label.names]

Full & Grand Means Model

# Baseline: full OLS model using every feature.
model.full = lm(formula , data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -2.5824 -0.5814 -0.0776  0.5202  3.8650 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -3.925e+00  2.509e-01 -15.644  < 2e-16 ***
## x1          -8.424e-03  1.709e-02  -0.493 0.622160    
## x2           1.604e-03  1.092e-02   0.147 0.883262    
## x3           2.750e-03  2.991e-03   0.919 0.357971    
## x4          -1.296e-03  2.359e-04  -5.495 4.07e-08 ***
## x5           8.787e-03  7.765e-03   1.132 0.257820    
## x6          -3.845e-03  1.564e-02  -0.246 0.805832    
## x7           3.575e-01  1.671e-02  21.393  < 2e-16 ***
## x8           1.221e-02  3.868e-03   3.156 0.001606 ** 
## x9           1.027e-01  8.660e-03  11.862  < 2e-16 ***
## x10          3.600e-02  8.080e-03   4.456 8.51e-06 ***
## x11          6.353e+06  1.935e+06   3.283 0.001033 ** 
## x12         -2.358e-03  4.886e-03  -0.483 0.629407    
## x13          3.475e-03  1.970e-03   1.764 0.077761 .  
## x14         -1.055e-02  8.454e-03  -1.248 0.211899    
## x15          4.236e-03  8.068e-03   0.525 0.599540    
## x16          2.648e-02  5.587e-03   4.739 2.20e-06 ***
## x17          3.891e-02  8.518e-03   4.568 5.03e-06 ***
## x19          6.818e-03  4.340e-03   1.571 0.116279    
## x20         -2.677e-02  3.007e-02  -0.890 0.373278    
## x21          3.683e-03  1.113e-03   3.310 0.000939 ***
## x22         -1.278e-02  9.037e-03  -1.414 0.157404    
## x23          5.750e-03  8.567e-03   0.671 0.502136    
## stat1        5.306e-04  6.498e-03   0.082 0.934913    
## stat2        4.961e-03  6.431e-03   0.771 0.440480    
## stat3        1.425e-02  6.508e-03   2.190 0.028584 *  
## stat4       -1.443e-02  6.522e-03  -2.213 0.026933 *  
## stat5       -4.770e-03  6.548e-03  -0.728 0.466415    
## stat6       -2.974e-03  6.478e-03  -0.459 0.646212    
## stat7       -7.498e-03  6.479e-03  -1.157 0.247200    
## stat8        6.005e-03  6.471e-03   0.928 0.353476    
## stat9        9.361e-05  6.469e-03   0.014 0.988455    
## stat10      -6.550e-03  6.506e-03  -1.007 0.314061    
## stat11      -8.064e-03  6.564e-03  -1.228 0.219350    
## stat12       1.481e-03  6.498e-03   0.228 0.819741    
## stat13      -8.888e-03  6.479e-03  -1.372 0.170157    
## stat14      -3.082e-02  6.448e-03  -4.779 1.80e-06 ***
## stat15      -1.078e-02  6.453e-03  -1.670 0.094947 .  
## stat16      -7.102e-04  6.485e-03  -0.110 0.912792    
## stat17      -5.881e-04  6.441e-03  -0.091 0.927252    
## stat18      -4.467e-03  6.448e-03  -0.693 0.488493    
## stat19       3.606e-03  6.464e-03   0.558 0.576971    
## stat20      -6.463e-03  6.465e-03  -1.000 0.317468    
## stat21      -2.137e-04  6.499e-03  -0.033 0.973764    
## stat22      -5.384e-03  6.490e-03  -0.830 0.406822    
## stat23       1.421e-02  6.455e-03   2.201 0.027742 *  
## stat24      -9.758e-03  6.494e-03  -1.503 0.132986    
## stat25      -9.751e-03  6.477e-03  -1.505 0.132282    
## stat26      -1.042e-02  6.469e-03  -1.611 0.107240    
## stat27       5.590e-04  6.531e-03   0.086 0.931788    
## stat28       6.646e-03  6.480e-03   1.026 0.305141    
## stat29       4.429e-03  6.522e-03   0.679 0.497093    
## stat30       5.488e-03  6.515e-03   0.842 0.399678    
## stat31      -4.260e-03  6.512e-03  -0.654 0.513070    
## stat32       1.588e-04  6.510e-03   0.024 0.980540    
## stat33      -1.319e-02  6.463e-03  -2.040 0.041358 *  
## stat34       3.860e-03  6.453e-03   0.598 0.549740    
## stat35      -1.180e-02  6.509e-03  -1.813 0.069811 .  
## stat36       5.037e-03  6.452e-03   0.781 0.435055    
## stat37      -2.973e-03  6.536e-03  -0.455 0.649256    
## stat38       1.348e-02  6.535e-03   2.063 0.039192 *  
## stat39      -7.249e-03  6.440e-03  -1.126 0.260327    
## stat40      -2.776e-03  6.463e-03  -0.430 0.667553    
## stat41      -1.795e-02  6.437e-03  -2.789 0.005298 ** 
## stat42      -3.767e-03  6.487e-03  -0.581 0.561510    
## stat43      -3.641e-03  6.477e-03  -0.562 0.574079    
## stat44       4.024e-03  6.457e-03   0.623 0.533202    
## stat45      -1.040e-02  6.468e-03  -1.608 0.107959    
## stat46       9.085e-03  6.464e-03   1.405 0.159935    
## stat47       2.750e-03  6.525e-03   0.422 0.673379    
## stat48       1.249e-02  6.475e-03   1.929 0.053730 .  
## stat49       5.456e-03  6.414e-03   0.851 0.395006    
## stat50       4.527e-03  6.455e-03   0.701 0.483114    
## stat51       1.378e-03  6.438e-03   0.214 0.830466    
## stat52       1.929e-03  6.477e-03   0.298 0.765802    
## stat53      -3.893e-03  6.523e-03  -0.597 0.550676    
## stat54      -8.396e-03  6.512e-03  -1.289 0.197309    
## stat55       5.636e-03  6.430e-03   0.876 0.380812    
## stat56      -4.010e-03  6.535e-03  -0.614 0.539460    
## stat57       5.938e-03  6.439e-03   0.922 0.356521    
## stat58       2.603e-03  6.424e-03   0.405 0.685293    
## stat59       4.149e-03  6.492e-03   0.639 0.522785    
## stat60       1.136e-02  6.471e-03   1.756 0.079196 .  
## stat61      -6.374e-03  6.497e-03  -0.981 0.326573    
## stat62      -9.863e-03  6.457e-03  -1.527 0.126714    
## stat63       5.283e-03  6.516e-03   0.811 0.417539    
## stat64      -9.204e-04  6.455e-03  -0.143 0.886624    
## stat65      -7.122e-03  6.497e-03  -1.096 0.273007    
## stat66       6.816e-03  6.581e-03   1.036 0.300421    
## stat67       1.034e-03  6.543e-03   0.158 0.874413    
## stat68      -2.636e-03  6.524e-03  -0.404 0.686219    
## stat69      -2.269e-03  6.479e-03  -0.350 0.726127    
## stat70       6.023e-03  6.420e-03   0.938 0.348155    
## stat71       3.863e-03  6.455e-03   0.598 0.549566    
## stat72       3.487e-03  6.511e-03   0.536 0.592305    
## stat73       8.544e-03  6.533e-03   1.308 0.191006    
## stat74      -7.103e-03  6.489e-03  -1.095 0.273709    
## stat75      -5.151e-03  6.515e-03  -0.791 0.429194    
## stat76       4.510e-03  6.533e-03   0.690 0.490052    
## stat77      -6.357e-03  6.472e-03  -0.982 0.326064    
## stat78      -3.308e-03  6.478e-03  -0.511 0.609656    
## stat79      -1.455e-03  6.471e-03  -0.225 0.822111    
## stat80       7.024e-03  6.516e-03   1.078 0.281114    
## stat81       6.587e-03  6.565e-03   1.003 0.315724    
## stat82       1.606e-03  6.474e-03   0.248 0.804099    
## stat83      -8.355e-03  6.471e-03  -1.291 0.196680    
## stat84      -2.440e-04  6.510e-03  -0.037 0.970098    
## stat85      -7.771e-03  6.466e-03  -1.202 0.229481    
## stat86      -4.164e-04  6.494e-03  -0.064 0.948885    
## stat87      -9.079e-03  6.523e-03  -1.392 0.164067    
## stat88      -5.612e-03  6.447e-03  -0.870 0.384074    
## stat89      -6.792e-03  6.447e-03  -1.053 0.292162    
## stat90      -1.119e-02  6.489e-03  -1.725 0.084559 .  
## stat91      -1.280e-02  6.417e-03  -1.994 0.046153 *  
## stat92      -1.237e-02  6.504e-03  -1.901 0.057328 .  
## stat93      -2.206e-03  6.536e-03  -0.338 0.735737    
## stat94      -4.053e-03  6.484e-03  -0.625 0.531981    
## stat95      -6.190e-04  6.486e-03  -0.095 0.923977    
## stat96      -6.535e-03  6.463e-03  -1.011 0.311972    
## stat97      -7.771e-04  6.450e-03  -0.120 0.904104    
## stat98       1.008e-01  6.400e-03  15.745  < 2e-16 ***
## stat99       9.946e-03  6.518e-03   1.526 0.127055    
## stat100      1.822e-02  6.512e-03   2.797 0.005172 ** 
## stat101     -5.475e-03  6.510e-03  -0.841 0.400314    
## stat102      2.384e-03  6.514e-03   0.366 0.714359    
## stat103     -1.011e-02  6.574e-03  -1.538 0.124173    
## stat104     -3.356e-03  6.493e-03  -0.517 0.605242    
## stat105      9.360e-03  6.409e-03   1.461 0.144179    
## stat106     -7.519e-03  6.465e-03  -1.163 0.244821    
## stat107     -2.934e-03  6.494e-03  -0.452 0.651389    
## stat108     -9.204e-03  6.477e-03  -1.421 0.155327    
## stat109      2.381e-03  6.480e-03   0.367 0.713312    
## stat110     -9.748e-02  6.450e-03 -15.112  < 2e-16 ***
## stat111      8.153e-04  6.479e-03   0.126 0.899865    
## stat112     -1.522e-03  6.510e-03  -0.234 0.815174    
## stat113     -2.222e-03  6.533e-03  -0.340 0.733817    
## stat114     -1.945e-03  6.484e-03  -0.300 0.764223    
## stat115      1.023e-03  6.458e-03   0.158 0.874110    
## stat116      1.614e-03  6.504e-03   0.248 0.804026    
## stat117      3.608e-03  6.476e-03   0.557 0.577495    
## stat118     -3.648e-03  6.442e-03  -0.566 0.571298    
## stat119      4.460e-03  6.519e-03   0.684 0.493931    
## stat120      2.716e-03  6.419e-03   0.423 0.672243    
## stat121     -5.138e-03  6.486e-03  -0.792 0.428309    
## stat122      1.026e-03  6.448e-03   0.159 0.873608    
## stat123     -2.169e-03  6.577e-03  -0.330 0.741575    
## stat124     -3.409e-03  6.457e-03  -0.528 0.597535    
## stat125      1.659e-03  6.501e-03   0.255 0.798525    
## stat126      6.894e-03  6.467e-03   1.066 0.286460    
## stat127     -2.753e-04  6.459e-03  -0.043 0.966005    
## stat128     -4.108e-03  6.472e-03  -0.635 0.525641    
## stat129      2.687e-03  6.475e-03   0.415 0.678167    
## stat130      1.443e-03  6.506e-03   0.222 0.824457    
## stat131     -1.149e-03  6.498e-03  -0.177 0.859643    
## stat132     -6.792e-03  6.484e-03  -1.047 0.294955    
## stat133      4.523e-03  6.510e-03   0.695 0.487259    
## stat134     -1.086e-02  6.438e-03  -1.688 0.091548 .  
## stat135      1.683e-03  6.482e-03   0.260 0.795169    
## stat136      6.884e-03  6.516e-03   1.056 0.290808    
## stat137     -4.061e-03  6.425e-03  -0.632 0.527400    
## stat138     -7.573e-04  6.470e-03  -0.117 0.906837    
## stat139      5.477e-03  6.509e-03   0.841 0.400123    
## stat140      3.217e-03  6.467e-03   0.497 0.618950    
## stat141      5.582e-03  6.461e-03   0.864 0.387659    
## stat142     -3.831e-03  6.523e-03  -0.587 0.557071    
## stat143      7.471e-04  6.481e-03   0.115 0.908230    
## stat144      1.057e-02  6.461e-03   1.636 0.101865    
## stat145     -3.591e-03  6.540e-03  -0.549 0.583016    
## stat146     -1.198e-02  6.496e-03  -1.845 0.065088 .  
## stat147     -1.087e-02  6.579e-03  -1.653 0.098378 .  
## stat148     -5.762e-03  6.410e-03  -0.899 0.368767    
## stat149     -1.406e-02  6.555e-03  -2.145 0.032034 *  
## stat150      1.900e-03  6.503e-03   0.292 0.770215    
## stat151     -9.335e-03  6.566e-03  -1.422 0.155156    
## stat152     -5.659e-03  6.473e-03  -0.874 0.382049    
## stat153      4.693e-03  6.578e-03   0.713 0.475618    
## stat154      4.456e-04  6.544e-03   0.068 0.945715    
## stat155     -3.110e-03  6.475e-03  -0.480 0.631050    
## stat156      1.176e-02  6.469e-03   1.818 0.069102 .  
## stat157      4.279e-03  6.447e-03   0.664 0.506869    
## stat158     -4.047e-03  6.592e-03  -0.614 0.539295    
## stat159      5.362e-04  6.452e-03   0.083 0.933769    
## stat160      7.874e-04  6.546e-03   0.120 0.904267    
## stat161      8.361e-03  6.520e-03   1.282 0.199796    
## stat162      2.024e-04  6.442e-03   0.031 0.974938    
## stat163      5.648e-03  6.531e-03   0.865 0.387177    
## stat164      1.186e-02  6.539e-03   1.814 0.069757 .  
## stat165     -1.501e-03  6.472e-03  -0.232 0.816644    
## stat166     -1.019e-02  6.421e-03  -1.588 0.112437    
## stat167     -8.326e-03  6.475e-03  -1.286 0.198549    
## stat168     -2.159e-03  6.472e-03  -0.334 0.738672    
## stat169      7.502e-05  6.510e-03   0.012 0.990807    
## stat170     -1.537e-03  6.509e-03  -0.236 0.813387    
## stat171      5.019e-03  6.538e-03   0.768 0.442701    
## stat172      7.204e-03  6.475e-03   1.113 0.265966    
## stat173     -2.399e-03  6.498e-03  -0.369 0.711956    
## stat174     -3.187e-03  6.511e-03  -0.489 0.624537    
## stat175     -3.992e-03  6.509e-03  -0.613 0.539701    
## stat176      1.801e-03  6.466e-03   0.279 0.780605    
## stat177     -4.816e-03  6.498e-03  -0.741 0.458662    
## stat178     -5.633e-03  6.515e-03  -0.865 0.387320    
## stat179      9.150e-04  6.491e-03   0.141 0.887900    
## stat180     -4.688e-03  6.432e-03  -0.729 0.466122    
## stat181      4.089e-03  6.494e-03   0.630 0.528909    
## stat182      2.376e-03  6.547e-03   0.363 0.716690    
## stat183      8.397e-03  6.453e-03   1.301 0.193234    
## stat184      1.135e-04  6.524e-03   0.017 0.986121    
## stat185     -5.280e-04  6.444e-03  -0.082 0.934705    
## stat186      1.714e-03  6.522e-03   0.263 0.792727    
## stat187     -1.221e-02  6.451e-03  -1.893 0.058448 .  
## stat188     -5.587e-03  6.463e-03  -0.864 0.387373    
## stat189      2.097e-03  6.495e-03   0.323 0.746862    
## stat190     -2.773e-03  6.456e-03  -0.429 0.667589    
## stat191     -8.789e-03  6.500e-03  -1.352 0.176367    
## stat192      1.593e-03  6.555e-03   0.243 0.808032    
## stat193     -7.033e-04  6.545e-03  -0.107 0.914436    
## stat194      4.268e-04  6.475e-03   0.066 0.947447    
## stat195      1.011e-02  6.485e-03   1.560 0.118920    
## stat196     -2.457e-03  6.568e-03  -0.374 0.708412    
## stat197      6.726e-03  6.431e-03   1.046 0.295651    
## stat198     -1.275e-02  6.478e-03  -1.968 0.049123 *  
## stat199      6.429e-03  6.446e-03   0.997 0.318574    
## stat200     -5.881e-03  6.411e-03  -0.917 0.358979    
## stat201     -4.094e-03  6.457e-03  -0.634 0.526075    
## stat202     -1.800e-03  6.559e-03  -0.274 0.783768    
## stat203      4.578e-03  6.501e-03   0.704 0.481387    
## stat204     -1.071e-02  6.469e-03  -1.656 0.097739 .  
## stat205     -7.346e-03  6.464e-03  -1.137 0.255774    
## stat206     -6.028e-03  6.520e-03  -0.925 0.355220    
## stat207      6.624e-03  6.494e-03   1.020 0.307799    
## stat208      1.594e-03  6.484e-03   0.246 0.805759    
## stat209     -2.027e-03  6.454e-03  -0.314 0.753465    
## stat210     -5.345e-03  6.527e-03  -0.819 0.412879    
## stat211     -3.052e-03  6.451e-03  -0.473 0.636210    
## stat212      2.406e-03  6.515e-03   0.369 0.711924    
## stat213     -3.216e-03  6.520e-03  -0.493 0.621805    
## stat214     -1.220e-02  6.458e-03  -1.888 0.059012 .  
## stat215     -6.541e-03  6.483e-03  -1.009 0.312996    
## stat216      1.393e-03  6.488e-03   0.215 0.829983    
## stat217      1.054e-02  6.498e-03   1.622 0.104903    
## sqrt.x18     7.743e-01  2.478e-02  31.255  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.8528 on 5761 degrees of freedom
## Multiple R-squared:  0.3018, Adjusted R-squared:  0.2727 
## F-statistic: 10.38 on 240 and 5761 DF,  p-value: < 2.2e-16
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 308"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking with removal of high-influence points

# Rows whose Cook's distance exceeds the common 4/n rule of thumb
high.cd <- names(cd.full[cd.full > 4 / nrow(data.train)])
# Refit the full model with those influential rows excluded
data.train2 <- data.train[!(rownames(data.train) %in% high.cd), ]
model.full2 <- lm(formula, data = data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.69772 -0.51462 -0.04202  0.50328  1.90918 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -4.205e+00  2.193e-01 -19.178  < 2e-16 ***
## x1          -1.668e-02  1.495e-02  -1.116 0.264501    
## x2           4.813e-03  9.526e-03   0.505 0.613359    
## x3           2.646e-03  2.607e-03   1.015 0.310017    
## x4          -1.571e-03  2.062e-04  -7.617 3.03e-14 ***
## x5           1.597e-02  6.776e-03   2.357 0.018462 *  
## x6          -7.895e-03  1.364e-02  -0.579 0.562733    
## x7           3.857e-01  1.459e-02  26.428  < 2e-16 ***
## x8           1.178e-02  3.382e-03   3.484 0.000498 ***
## x9           1.040e-01  7.548e-03  13.775  < 2e-16 ***
## x10          4.481e-02  7.069e-03   6.339 2.50e-10 ***
## x11          6.478e+06  1.690e+06   3.834 0.000128 ***
## x12          7.822e-04  4.246e-03   0.184 0.853866    
## x13          4.498e-03  1.719e-03   2.616 0.008922 ** 
## x14         -1.284e-02  7.379e-03  -1.740 0.081955 .  
## x15          2.730e-03  7.044e-03   0.388 0.698396    
## x16          2.664e-02  4.882e-03   5.457 5.07e-08 ***
## x17          4.233e-02  7.440e-03   5.689 1.34e-08 ***
## x19          8.100e-03  3.794e-03   2.135 0.032832 *  
## x20         -1.754e-02  2.626e-02  -0.668 0.504131    
## x21          4.327e-03  9.703e-04   4.460 8.36e-06 ***
## x22         -1.846e-02  7.873e-03  -2.344 0.019089 *  
## x23          7.219e-03  7.489e-03   0.964 0.335146    
## stat1        7.278e-04  5.666e-03   0.128 0.897798    
## stat2        5.034e-03  5.606e-03   0.898 0.369282    
## stat3        1.545e-02  5.684e-03   2.718 0.006589 ** 
## stat4       -1.643e-02  5.697e-03  -2.885 0.003935 ** 
## stat5       -9.143e-03  5.726e-03  -1.597 0.110389    
## stat6       -4.596e-03  5.646e-03  -0.814 0.415670    
## stat7       -1.053e-02  5.635e-03  -1.869 0.061653 .  
## stat8        5.790e-03  5.647e-03   1.025 0.305230    
## stat9       -3.224e-04  5.653e-03  -0.057 0.954525    
## stat10      -4.072e-03  5.661e-03  -0.719 0.471957    
## stat11      -1.190e-02  5.724e-03  -2.080 0.037618 *  
## stat12       1.802e-03  5.671e-03   0.318 0.750644    
## stat13      -7.549e-03  5.659e-03  -1.334 0.182322    
## stat14      -3.528e-02  5.619e-03  -6.279 3.66e-10 ***
## stat15      -1.604e-02  5.643e-03  -2.843 0.004486 ** 
## stat16      -3.426e-03  5.656e-03  -0.606 0.544722    
## stat17      -1.397e-04  5.625e-03  -0.025 0.980192    
## stat18      -5.853e-03  5.618e-03  -1.042 0.297485    
## stat19       7.571e-04  5.657e-03   0.134 0.893536    
## stat20       4.142e-04  5.647e-03   0.073 0.941533    
## stat21       2.304e-03  5.668e-03   0.406 0.684445    
## stat22      -1.999e-03  5.665e-03  -0.353 0.724252    
## stat23       1.172e-02  5.640e-03   2.078 0.037764 *  
## stat24      -1.004e-02  5.665e-03  -1.771 0.076570 .  
## stat25      -1.043e-02  5.652e-03  -1.845 0.065126 .  
## stat26      -1.255e-02  5.652e-03  -2.220 0.026484 *  
## stat27       1.580e-03  5.719e-03   0.276 0.782358    
## stat28       2.182e-03  5.661e-03   0.385 0.699969    
## stat29       6.051e-03  5.688e-03   1.064 0.287433    
## stat30       4.618e-03  5.676e-03   0.814 0.415911    
## stat31      -3.400e-03  5.680e-03  -0.599 0.549484    
## stat32       1.068e-03  5.697e-03   0.187 0.851313    
## stat33      -1.224e-02  5.647e-03  -2.167 0.030313 *  
## stat34       7.613e-03  5.638e-03   1.350 0.176947    
## stat35      -1.314e-02  5.676e-03  -2.315 0.020647 *  
## stat36       6.747e-03  5.643e-03   1.196 0.231909    
## stat37      -6.245e-03  5.701e-03  -1.095 0.273392    
## stat38       1.613e-02  5.689e-03   2.836 0.004587 ** 
## stat39      -1.145e-02  5.604e-03  -2.043 0.041061 *  
## stat40       4.208e-04  5.645e-03   0.075 0.940583    
## stat41      -1.741e-02  5.618e-03  -3.099 0.001954 ** 
## stat42      -3.155e-03  5.669e-03  -0.556 0.577910    
## stat43      -4.673e-03  5.644e-03  -0.828 0.407692    
## stat44       6.983e-03  5.642e-03   1.238 0.215884    
## stat45      -8.337e-03  5.638e-03  -1.479 0.139299    
## stat46       4.736e-03  5.631e-03   0.841 0.400398    
## stat47       6.787e-03  5.695e-03   1.192 0.233403    
## stat48       1.034e-02  5.635e-03   1.835 0.066516 .  
## stat49      -4.763e-04  5.603e-03  -0.085 0.932247    
## stat50       1.481e-04  5.630e-03   0.026 0.979006    
## stat51       9.248e-04  5.623e-03   0.164 0.869375    
## stat52       7.163e-03  5.658e-03   1.266 0.205536    
## stat53      -2.502e-04  5.688e-03  -0.044 0.964916    
## stat54      -8.725e-03  5.686e-03  -1.534 0.125011    
## stat55       3.812e-03  5.601e-03   0.681 0.496181    
## stat56      -1.085e-03  5.701e-03  -0.190 0.849106    
## stat57       6.893e-03  5.629e-03   1.225 0.220787    
## stat58       2.476e-03  5.595e-03   0.442 0.658200    
## stat59       3.223e-03  5.655e-03   0.570 0.568781    
## stat60       1.471e-02  5.655e-03   2.601 0.009331 ** 
## stat61      -7.497e-03  5.669e-03  -1.322 0.186068    
## stat62      -1.266e-02  5.632e-03  -2.248 0.024642 *  
## stat63       6.482e-04  5.687e-03   0.114 0.909261    
## stat64       2.913e-03  5.633e-03   0.517 0.605053    
## stat65      -1.790e-03  5.678e-03  -0.315 0.752554    
## stat66       5.189e-03  5.743e-03   0.904 0.366203    
## stat67       5.909e-03  5.710e-03   1.035 0.300783    
## stat68      -5.228e-03  5.705e-03  -0.916 0.359457    
## stat69      -1.852e-03  5.668e-03  -0.327 0.743902    
## stat70       8.608e-03  5.601e-03   1.537 0.124372    
## stat71       5.698e-03  5.644e-03   1.010 0.312738    
## stat72      -4.614e-03  5.677e-03  -0.813 0.416475    
## stat73       8.586e-03  5.704e-03   1.505 0.132319    
## stat74      -5.344e-03  5.665e-03  -0.943 0.345513    
## stat75      -1.173e-03  5.681e-03  -0.206 0.836454    
## stat76       4.965e-03  5.694e-03   0.872 0.383269    
## stat77      -1.238e-03  5.655e-03  -0.219 0.826778    
## stat78      -9.090e-03  5.639e-03  -1.612 0.107001    
## stat79       9.345e-04  5.640e-03   0.166 0.868405    
## stat80       8.065e-03  5.694e-03   1.416 0.156758    
## stat81       2.785e-03  5.731e-03   0.486 0.627043    
## stat82      -1.287e-03  5.649e-03  -0.228 0.819845    
## stat83      -7.553e-03  5.647e-03  -1.337 0.181134    
## stat84      -2.943e-03  5.679e-03  -0.518 0.604343    
## stat85      -1.530e-02  5.642e-03  -2.713 0.006695 ** 
## stat86       2.250e-03  5.672e-03   0.397 0.691604    
## stat87      -6.908e-03  5.690e-03  -1.214 0.224742    
## stat88       2.217e-04  5.628e-03   0.039 0.968575    
## stat89      -3.752e-03  5.634e-03  -0.666 0.505486    
## stat90      -1.305e-02  5.661e-03  -2.306 0.021161 *  
## stat91      -1.320e-02  5.592e-03  -2.361 0.018250 *  
## stat92      -9.678e-03  5.671e-03  -1.707 0.087958 .  
## stat93      -3.247e-03  5.722e-03  -0.567 0.570422    
## stat94      -1.818e-04  5.651e-03  -0.032 0.974341    
## stat95       1.936e-03  5.658e-03   0.342 0.732261    
## stat96      -7.451e-03  5.632e-03  -1.323 0.185901    
## stat97       1.082e-03  5.632e-03   0.192 0.847598    
## stat98       1.073e-01  5.586e-03  19.205  < 2e-16 ***
## stat99       1.233e-02  5.685e-03   2.169 0.030132 *  
## stat100      2.196e-02  5.670e-03   3.873 0.000109 ***
## stat101     -4.710e-03  5.675e-03  -0.830 0.406584    
## stat102      2.789e-03  5.674e-03   0.492 0.623063    
## stat103     -1.390e-02  5.726e-03  -2.428 0.015198 *  
## stat104     -3.199e-03  5.680e-03  -0.563 0.573374    
## stat105      9.378e-03  5.599e-03   1.675 0.094014 .  
## stat106     -6.985e-03  5.654e-03  -1.235 0.216743    
## stat107     -4.511e-04  5.659e-03  -0.080 0.936476    
## stat108     -1.087e-02  5.662e-03  -1.921 0.054832 .  
## stat109     -1.123e-03  5.659e-03  -0.198 0.842762    
## stat110     -1.020e-01  5.632e-03 -18.115  < 2e-16 ***
## stat111     -4.601e-04  5.648e-03  -0.081 0.935077    
## stat112      2.455e-03  5.676e-03   0.433 0.665348    
## stat113     -9.441e-04  5.704e-03  -0.166 0.868550    
## stat114      3.618e-04  5.653e-03   0.064 0.948976    
## stat115      2.599e-03  5.638e-03   0.461 0.644844    
## stat116      6.369e-04  5.682e-03   0.112 0.910766    
## stat117      8.288e-03  5.641e-03   1.469 0.141849    
## stat118      2.390e-03  5.621e-03   0.425 0.670748    
## stat119      8.543e-03  5.687e-03   1.502 0.133124    
## stat120     -2.313e-03  5.593e-03  -0.414 0.679184    
## stat121     -5.616e-03  5.659e-03  -0.992 0.321046    
## stat122     -3.784e-03  5.638e-03  -0.671 0.502199    
## stat123      3.790e-03  5.743e-03   0.660 0.509376    
## stat124     -5.722e-03  5.639e-03  -1.015 0.310289    
## stat125     -1.847e-03  5.680e-03  -0.325 0.745082    
## stat126      7.792e-03  5.648e-03   1.380 0.167751    
## stat127     -2.125e-03  5.637e-03  -0.377 0.706198    
## stat128     -6.638e-03  5.637e-03  -1.178 0.239009    
## stat129     -7.598e-04  5.642e-03  -0.135 0.892893    
## stat130      4.827e-04  5.675e-03   0.085 0.932220    
## stat131      1.364e-03  5.665e-03   0.241 0.809774    
## stat132     -9.166e-03  5.659e-03  -1.620 0.105353    
## stat133      7.183e-03  5.707e-03   1.259 0.208240    
## stat134     -1.244e-02  5.621e-03  -2.212 0.026994 *  
## stat135     -1.738e-04  5.658e-03  -0.031 0.975497    
## stat136     -9.758e-04  5.680e-03  -0.172 0.863609    
## stat137     -1.223e-04  5.595e-03  -0.022 0.982562    
## stat138      1.082e-03  5.653e-03   0.191 0.848213    
## stat139      2.049e-03  5.676e-03   0.361 0.718157    
## stat140      3.304e-03  5.625e-03   0.587 0.557008    
## stat141      7.381e-03  5.629e-03   1.311 0.189836    
## stat142     -1.495e-03  5.683e-03  -0.263 0.792472    
## stat143     -2.851e-03  5.664e-03  -0.503 0.614681    
## stat144      9.254e-03  5.650e-03   1.638 0.101539    
## stat145     -2.484e-03  5.715e-03  -0.435 0.663901    
## stat146     -1.015e-02  5.666e-03  -1.791 0.073374 .  
## stat147     -1.572e-02  5.744e-03  -2.736 0.006232 ** 
## stat148     -9.445e-03  5.607e-03  -1.685 0.092121 .  
## stat149     -1.639e-02  5.734e-03  -2.859 0.004268 ** 
## stat150     -3.435e-03  5.684e-03  -0.604 0.545614    
## stat151     -5.604e-03  5.751e-03  -0.974 0.329897    
## stat152     -2.266e-03  5.644e-03  -0.401 0.688148    
## stat153      6.645e-03  5.728e-03   1.160 0.246040    
## stat154     -3.118e-04  5.713e-03  -0.055 0.956483    
## stat155      4.849e-03  5.663e-03   0.856 0.391945    
## stat156      8.893e-03  5.643e-03   1.576 0.115107    
## stat157      5.961e-03  5.621e-03   1.060 0.288975    
## stat158      2.188e-03  5.759e-03   0.380 0.704033    
## stat159      4.803e-03  5.632e-03   0.853 0.393830    
## stat160     -2.116e-03  5.720e-03  -0.370 0.711374    
## stat161      6.496e-03  5.688e-03   1.142 0.253552    
## stat162     -3.312e-04  5.609e-03  -0.059 0.952917    
## stat163      5.626e-03  5.708e-03   0.986 0.324283    
## stat164      7.250e-03  5.704e-03   1.271 0.203752    
## stat165     -4.282e-03  5.641e-03  -0.759 0.447869    
## stat166     -9.368e-03  5.590e-03  -1.676 0.093820 .  
## stat167     -1.262e-02  5.653e-03  -2.233 0.025612 *  
## stat168     -1.203e-03  5.640e-03  -0.213 0.831134    
## stat169      4.260e-03  5.698e-03   0.748 0.454707    
## stat170     -3.101e-03  5.690e-03  -0.545 0.585786    
## stat171      4.863e-04  5.703e-03   0.085 0.932052    
## stat172      9.813e-03  5.637e-03   1.741 0.081779 .  
## stat173      4.704e-03  5.671e-03   0.830 0.406841    
## stat174     -7.328e-04  5.676e-03  -0.129 0.897275    
## stat175     -5.938e-03  5.672e-03  -1.047 0.295183    
## stat176     -4.896e-03  5.632e-03  -0.869 0.384699    
## stat177     -1.213e-02  5.666e-03  -2.140 0.032381 *  
## stat178     -1.893e-03  5.685e-03  -0.333 0.739188    
## stat179      3.297e-03  5.681e-03   0.580 0.561708    
## stat180     -6.717e-03  5.627e-03  -1.194 0.232671    
## stat181      4.046e-03  5.663e-03   0.714 0.474981    
## stat182      3.202e-03  5.727e-03   0.559 0.576109    
## stat183      9.892e-03  5.639e-03   1.754 0.079448 .  
## stat184      2.195e-03  5.692e-03   0.386 0.699864    
## stat185      2.219e-03  5.639e-03   0.393 0.693985    
## stat186      5.739e-03  5.689e-03   1.009 0.313170    
## stat187     -9.522e-03  5.624e-03  -1.693 0.090493 .  
## stat188     -2.527e-03  5.641e-03  -0.448 0.654159    
## stat189     -3.673e-03  5.673e-03  -0.647 0.517408    
## stat190     -1.893e-03  5.632e-03  -0.336 0.736851    
## stat191     -9.444e-03  5.661e-03  -1.668 0.095353 .  
## stat192      6.100e-04  5.720e-03   0.107 0.915086    
## stat193      7.660e-03  5.716e-03   1.340 0.180264    
## stat194     -1.229e-04  5.664e-03  -0.022 0.982685    
## stat195      1.106e-02  5.676e-03   1.949 0.051304 .  
## stat196     -6.343e-03  5.731e-03  -1.107 0.268454    
## stat197      2.048e-03  5.615e-03   0.365 0.715356    
## stat198     -1.603e-02  5.652e-03  -2.836 0.004591 ** 
## stat199      2.513e-03  5.632e-03   0.446 0.655502    
## stat200     -2.667e-03  5.612e-03  -0.475 0.634656    
## stat201     -9.926e-05  5.641e-03  -0.018 0.985961    
## stat202      4.624e-04  5.720e-03   0.081 0.935576    
## stat203      4.582e-03  5.678e-03   0.807 0.419790    
## stat204     -6.742e-03  5.654e-03  -1.193 0.233104    
## stat205     -1.238e-03  5.633e-03  -0.220 0.826012    
## stat206     -9.062e-03  5.683e-03  -1.594 0.110902    
## stat207      1.366e-02  5.670e-03   2.410 0.015993 *  
## stat208      3.377e-03  5.670e-03   0.596 0.551504    
## stat209      1.337e-03  5.619e-03   0.238 0.811968    
## stat210     -8.234e-03  5.692e-03  -1.447 0.148057    
## stat211     -4.407e-03  5.638e-03  -0.782 0.434512    
## stat212      6.703e-03  5.689e-03   1.178 0.238749    
## stat213     -2.713e-03  5.681e-03  -0.477 0.633049    
## stat214     -7.100e-03  5.650e-03  -1.257 0.208899    
## stat215     -7.214e-03  5.656e-03  -1.276 0.202168    
## stat216      3.714e-03  5.661e-03   0.656 0.511795    
## stat217      6.777e-03  5.668e-03   1.196 0.231934    
## sqrt.x18     8.021e-01  2.161e-02  37.116  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.724 on 5453 degrees of freedom
## Multiple R-squared:  0.4011, Adjusted R-squared:  0.3748 
## F-statistic: 15.22 on 240 and 5453 DF,  p-value: < 2.2e-16
cd.full2 = plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 251"
## [1] "Number of data points that have Cook's D > 1: 0"
# Residuals are much more normal than before.
# Compare the target-variable distribution for the high-influence
# (Cook's D > 4/n) points vs. the remaining points.
plotData = data.train %>% 
  rownames_to_column() %>%
  mutate(type=ifelse(rowname %in% high.cd,'High','Normal')) %>%
  dplyr::select(type,target=one_of(label.names))

# Boxplots by group; outliers hidden since the boxes are the comparison of
# interest here. (Fixed double-space typo in the original title.)
ggplot(data=plotData, aes(x=type,y=target)) +
  geom_boxplot(fill='light blue',outlier.shape=NA) +
  scale_y_continuous(name="Target Variable Values") +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Intercept-only (grand mean) baselines for both training sets; these serve
# as the lower scope for the stepwise searches below
model.null <- lm(grand.mean.formula, data = data.train)
# summary(model.null)
# plot.diagnostics(model.null, data.train)
model.null2 <- lm(grand.mean.formula, data = data.train2)
# summary(model.null2)
# plot.diagnostics(model.null2, data.train2)

Variable Selection

Basic: http://www.stat.columbia.edu/~martin/W2024/R10.pdf Cross Validation + Other Metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

if (algo.forward == TRUE){
  t1 = Sys.time()
  
  # Forward stepwise AIC search from the intercept-only model toward the
  # full model (trace suppressed)
  model.forward = step(model.null, scope=list(lower=model.null, upper=model.full), direction="forward", trace = 0)
  print(summary(model.forward))
  #saveRDS(model.forward,file = "model_forward.rds")
  
  t2 = Sys.time()
  # format() keeps the difftime units (secs/mins); a bare t2-t1 inside
  # paste() printed a unit-less number, which is ambiguous
  print(paste0("Time taken for Forward Selection: ", format(t2 - t1)))
  
  plot.diagnostics(model.forward, data.train)
}

Test

# Evaluate the forward-selected model on the held-out test set
if (algo.forward == TRUE) {
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

if (algo.forward == TRUE){
  t1 = Sys.time()
  
  # Forward stepwise AIC search on the filtered (influential rows removed)
  # training set
  model.forward2 = step(model.null2, scope=list(lower=model.null2, upper=model.full2), direction="forward", trace = 0)
  print(summary(model.forward2))
  #saveRDS(model.forward,file = "model_forward.rds")
  
  t2 = Sys.time()
  # format() keeps the difftime units; label "(2)" matches the test chunk's
  # "Forward Selection (2)" naming for this filtered-train run
  print(paste0("Time taken for Forward Selection (2): ", format(t2 - t1)))
  
  plot.diagnostics(model.forward2, data.train2)
}

Test

# Evaluate the filtered-train forward-selected model on the same test set
if (algo.forward == TRUE) {
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

if (algo.forward.caret == TRUE){
  # Fix the RNG so caret's cross-validation folds are reproducible
  set.seed(1)
  # Project helper wrapping caret::train with method "leapForward"
  # (forward selection via leaps). Returns a list with at least $model
  # and $id -- presumably the fitted model and the chosen subset-size
  # identifier; confirm against train.caret.glmselect's definition.
  returned = train.caret.glmselect(formula = formula
                                   , data = data.train
                                   , method = "leapForward"
                                   , feature.names = feature.names)
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 13 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD      MAESD
## 1       1 0.9365643 0.1236737 0.7536160 0.01668298 0.01993896 0.01279423
## 2       2 0.9079494 0.1770387 0.7298664 0.01750665 0.02892992 0.01357292
## 3       3 0.8944193 0.2008249 0.7155952 0.01901631 0.02919065 0.01320105
## 4       4 0.8742691 0.2363913 0.6937420 0.02185141 0.03097169 0.01484363
## 5       5 0.8638186 0.2544505 0.6856338 0.02338283 0.03597771 0.01555964
## 6       6 0.8614669 0.2584423 0.6838031 0.02330853 0.03655409 0.01630994
## 7       7 0.8616264 0.2581201 0.6838827 0.02252992 0.03576400 0.01543925
## 8       8 0.8606372 0.2598909 0.6832436 0.02346111 0.03617819 0.01574605
## 9       9 0.8589201 0.2629713 0.6817973 0.02303138 0.03595457 0.01457108
## 10     10 0.8560569 0.2677599 0.6802123 0.02220196 0.03458445 0.01366388
## 11     11 0.8566254 0.2668271 0.6804815 0.02260781 0.03462453 0.01470736
## 12     12 0.8560935 0.2677508 0.6801040 0.02254458 0.03351414 0.01464169
## 13     13 0.8557582 0.2683145 0.6801978 0.02209939 0.03340193 0.01515055
## 14     14 0.8560136 0.2678391 0.6807736 0.02126001 0.03146312 0.01439859
## 15     15 0.8564294 0.2671722 0.6809500 0.02114252 0.03193376 0.01400192
## 16     16 0.8572779 0.2657165 0.6813558 0.02112224 0.03148819 0.01412545
## 17     17 0.8584022 0.2638391 0.6812557 0.02118983 0.03089284 0.01417726
## 18     18 0.8584713 0.2637941 0.6812881 0.02175667 0.03231409 0.01475928
## 19     19 0.8586138 0.2635734 0.6814524 0.02161393 0.03117075 0.01456148
## 20     20 0.8592247 0.2625221 0.6818734 0.02162926 0.03098543 0.01479863
## 21     21 0.8591232 0.2627095 0.6817160 0.02100173 0.03027013 0.01418736
## 22     22 0.8595672 0.2619685 0.6819990 0.02036708 0.02970497 0.01366103
## 23     23 0.8599657 0.2613521 0.6822373 0.01991861 0.02926440 0.01327024
## 24     24 0.8600881 0.2611761 0.6823737 0.02007091 0.02934298 0.01373595
## 25     25 0.8597106 0.2618264 0.6820676 0.01977088 0.02939082 0.01361317
## 26     26 0.8597122 0.2618222 0.6821121 0.01991373 0.02947959 0.01351560
## 27     27 0.8602658 0.2609066 0.6828477 0.01955309 0.02872751 0.01344418
## 28     28 0.8607311 0.2601434 0.6837223 0.01927953 0.02851973 0.01333752
## 29     29 0.8612540 0.2593429 0.6842532 0.01932870 0.02832327 0.01312468
## 30     30 0.8612463 0.2593275 0.6845514 0.01892673 0.02811348 0.01318904
## 31     31 0.8611470 0.2595574 0.6841864 0.01944845 0.02890544 0.01335162
## 32     32 0.8615212 0.2588937 0.6840893 0.01914864 0.02839104 0.01349669
## 33     33 0.8617137 0.2585664 0.6841523 0.01934134 0.02923018 0.01385934
## 34     34 0.8618148 0.2584260 0.6839470 0.01912075 0.02953677 0.01375559
## 35     35 0.8622505 0.2577406 0.6843609 0.01905131 0.02905255 0.01360498
## 36     36 0.8627138 0.2569905 0.6846949 0.01920017 0.02907708 0.01353399
## 37     37 0.8627289 0.2569871 0.6847022 0.01946511 0.02971603 0.01372192
## 38     38 0.8624466 0.2574493 0.6843393 0.01939203 0.02970203 0.01359252
## 39     39 0.8623031 0.2577069 0.6840452 0.01954666 0.03006506 0.01374611
## 40     40 0.8624198 0.2574951 0.6841652 0.01921446 0.02954165 0.01378647
## 41     41 0.8626752 0.2571075 0.6841373 0.01945054 0.02956580 0.01396677
## 42     42 0.8629127 0.2567708 0.6843409 0.01982525 0.03012414 0.01429586
## 43     43 0.8627881 0.2569998 0.6840421 0.02016666 0.03039427 0.01463079
## 44     44 0.8631789 0.2563902 0.6843141 0.01985587 0.03001733 0.01443269
## 45     45 0.8633226 0.2561757 0.6842709 0.01993758 0.02977203 0.01446995
## 46     46 0.8635918 0.2557670 0.6843090 0.01949306 0.02977477 0.01394600
## 47     47 0.8638782 0.2552995 0.6846696 0.01951231 0.02909490 0.01415426
## 48     48 0.8639558 0.2552146 0.6848365 0.01960761 0.02876838 0.01391325
## 49     49 0.8642093 0.2548113 0.6850011 0.01962428 0.02901819 0.01392481
## 50     50 0.8644395 0.2544376 0.6848926 0.01950829 0.02874122 0.01384388
## 51     51 0.8646434 0.2541930 0.6850946 0.01986247 0.02912193 0.01419354
## 52     52 0.8647168 0.2540937 0.6851667 0.01988526 0.02897800 0.01426402
## 53     53 0.8650197 0.2536414 0.6853678 0.02002009 0.02965776 0.01437206
## 54     54 0.8651119 0.2534633 0.6852614 0.01970096 0.02947804 0.01412519
## 55     55 0.8652025 0.2533017 0.6852252 0.01977879 0.02915355 0.01420397
## 56     56 0.8652428 0.2532679 0.6852028 0.01970847 0.02938519 0.01419193
## 57     57 0.8653956 0.2530118 0.6853625 0.01959122 0.02897696 0.01416933
## 58     58 0.8655504 0.2528030 0.6853860 0.01973181 0.02955372 0.01437524
## 59     59 0.8657764 0.2524767 0.6854909 0.01969607 0.02973555 0.01394231
## 60     60 0.8655095 0.2529617 0.6854701 0.02007968 0.03058420 0.01397471
## 61     61 0.8654659 0.2530290 0.6854764 0.02000677 0.03069539 0.01392198
## 62     62 0.8653683 0.2531643 0.6852586 0.01998814 0.03042790 0.01365859
## 63     63 0.8652272 0.2533810 0.6850642 0.01992762 0.02996824 0.01357893
## 64     64 0.8653442 0.2532359 0.6852150 0.02005351 0.02996207 0.01391246
## 65     65 0.8656406 0.2527805 0.6852743 0.02010298 0.02977521 0.01392143
## 66     66 0.8658982 0.2523735 0.6853409 0.02012636 0.02959888 0.01381004
## 67     67 0.8664001 0.2516201 0.6853888 0.02027279 0.02956650 0.01407158
## 68     68 0.8667609 0.2510732 0.6859019 0.02002746 0.02927751 0.01396710
## 69     69 0.8667553 0.2511074 0.6861604 0.02032092 0.02962459 0.01386107
## 70     70 0.8666772 0.2512184 0.6862749 0.02037721 0.02924482 0.01393443
## 71     71 0.8669382 0.2508268 0.6863397 0.02005043 0.02859833 0.01375606
## 72     72 0.8669444 0.2508340 0.6862750 0.02019659 0.02905987 0.01378084
## 73     73 0.8668970 0.2509115 0.6861946 0.02048076 0.02927464 0.01370287
## 74     74 0.8671027 0.2506445 0.6865076 0.02053195 0.02958114 0.01376247
## 75     75 0.8671110 0.2506429 0.6865424 0.02035403 0.02863906 0.01364873
## 76     76 0.8672183 0.2505043 0.6867588 0.02052527 0.02857076 0.01392372
## 77     77 0.8667592 0.2512374 0.6862806 0.02076076 0.02876089 0.01416686
## 78     78 0.8671237 0.2506902 0.6866500 0.02076330 0.02897214 0.01402541
## 79     79 0.8672382 0.2505485 0.6867598 0.02068651 0.02870341 0.01401598
## 80     80 0.8672894 0.2504446 0.6868186 0.02055886 0.02854447 0.01394979
## 81     81 0.8675247 0.2501010 0.6869850 0.02042190 0.02874951 0.01401582
## 82     82 0.8677339 0.2497567 0.6872035 0.02048147 0.02878634 0.01401275
## 83     83 0.8679755 0.2494012 0.6874998 0.02047249 0.02892510 0.01387990
## 84     84 0.8682912 0.2488872 0.6878119 0.02050882 0.02868972 0.01390536
## 85     85 0.8684991 0.2485580 0.6879382 0.02050820 0.02873943 0.01383865
## 86     86 0.8685331 0.2485152 0.6880214 0.02053196 0.02851228 0.01411839
## 87     87 0.8686396 0.2483569 0.6879383 0.02045703 0.02845340 0.01409177
## 88     88 0.8687014 0.2483021 0.6880916 0.02069513 0.02894650 0.01416545
## 89     89 0.8687231 0.2482767 0.6881354 0.02091441 0.02914335 0.01425981
## 90     90 0.8686406 0.2484447 0.6879370 0.02099591 0.02929621 0.01427312
## 91     91 0.8687061 0.2483347 0.6880508 0.02109278 0.02916987 0.01446282
## 92     92 0.8689498 0.2479420 0.6882635 0.02125122 0.02925812 0.01466643
## 93     93 0.8689835 0.2478756 0.6882322 0.02097122 0.02912780 0.01449235
## 94     94 0.8691293 0.2476558 0.6884069 0.02096899 0.02932902 0.01459235
## 95     95 0.8690819 0.2477219 0.6883831 0.02107959 0.02965350 0.01452914
## 96     96 0.8692594 0.2474244 0.6883610 0.02099648 0.02950235 0.01457217
## 97     97 0.8691618 0.2475576 0.6883586 0.02083575 0.02932564 0.01441647
## 98     98 0.8692747 0.2473859 0.6883764 0.02062616 0.02904967 0.01426837
## 99     99 0.8692589 0.2474212 0.6883694 0.02054983 0.02905682 0.01427949
## 100   100 0.8694232 0.2471951 0.6886814 0.02076209 0.02922961 0.01458157
## 101   101 0.8693655 0.2472845 0.6886878 0.02065976 0.02926472 0.01461846
## 102   102 0.8694445 0.2471661 0.6888801 0.02090803 0.02956336 0.01468292
## 103   103 0.8692959 0.2473847 0.6887484 0.02086671 0.02942886 0.01466039
## 104   104 0.8696935 0.2467643 0.6891023 0.02082343 0.02924897 0.01480760
## 105   105 0.8697127 0.2467368 0.6891084 0.02080207 0.02929875 0.01476674
## 106   106 0.8698949 0.2464748 0.6892429 0.02099592 0.02939508 0.01483018
## 107   107 0.8700863 0.2461699 0.6893832 0.02111988 0.02965248 0.01472276
## 108   108 0.8703431 0.2457824 0.6895004 0.02118405 0.02975344 0.01468516
## 109   109 0.8705756 0.2454053 0.6896912 0.02119820 0.02952493 0.01477070
## 110   110 0.8706570 0.2452537 0.6897338 0.02117432 0.02950931 0.01458352
## 111   111 0.8706297 0.2452986 0.6896660 0.02112027 0.02959290 0.01456082
## 112   112 0.8705537 0.2454377 0.6896304 0.02107823 0.02957673 0.01462308
## 113   113 0.8705082 0.2455160 0.6896114 0.02097195 0.02944396 0.01453365
## 114   114 0.8705754 0.2454224 0.6897450 0.02090983 0.02920766 0.01461099
## 115   115 0.8705797 0.2454085 0.6897209 0.02083617 0.02914184 0.01456153
## 116   116 0.8705945 0.2453966 0.6897878 0.02082758 0.02909297 0.01461738
## 117   117 0.8706290 0.2453491 0.6898487 0.02079368 0.02916331 0.01450034
## 118   118 0.8706035 0.2454112 0.6897514 0.02068914 0.02909768 0.01438746
## 119   119 0.8706283 0.2454149 0.6898045 0.02080031 0.02921949 0.01436571
## 120   120 0.8707871 0.2451897 0.6898049 0.02084206 0.02942329 0.01433198
## 121   121 0.8709820 0.2448916 0.6898475 0.02080758 0.02961627 0.01431367
## 122   122 0.8709697 0.2449152 0.6898767 0.02081258 0.02949367 0.01433406
## 123   123 0.8710912 0.2447527 0.6899111 0.02082509 0.02945656 0.01436855
## 124   124 0.8711597 0.2446399 0.6900852 0.02071027 0.02925190 0.01435693
## 125   125 0.8712877 0.2444422 0.6900894 0.02083640 0.02923366 0.01441682
## 126   126 0.8713941 0.2442847 0.6901772 0.02081951 0.02926134 0.01431285
## 127   127 0.8712859 0.2444448 0.6901236 0.02071286 0.02914460 0.01424163
## 128   128 0.8712086 0.2445754 0.6900619 0.02078744 0.02919258 0.01423561
## 129   129 0.8710531 0.2448400 0.6899657 0.02086813 0.02941884 0.01412489
## 130   130 0.8711358 0.2447343 0.6900480 0.02093237 0.02945822 0.01410371
## 131   131 0.8712046 0.2446274 0.6900496 0.02089466 0.02926282 0.01423287
## 132   132 0.8711058 0.2447591 0.6900162 0.02084927 0.02903770 0.01424155
## 133   133 0.8710401 0.2448937 0.6899364 0.02091776 0.02902598 0.01427673
## 134   134 0.8710319 0.2449147 0.6899250 0.02094361 0.02910491 0.01426124
## 135   135 0.8711151 0.2447972 0.6899568 0.02064792 0.02861444 0.01406953
## 136   136 0.8712306 0.2446072 0.6901280 0.02052301 0.02832357 0.01405575
## 137   137 0.8712621 0.2445669 0.6901539 0.02058951 0.02851199 0.01410071
## 138   138 0.8710916 0.2448250 0.6899997 0.02065806 0.02836444 0.01408700
## 139   139 0.8712367 0.2446157 0.6901616 0.02055622 0.02811890 0.01397275
## 140   140 0.8711998 0.2446712 0.6901027 0.02060822 0.02799414 0.01406669
## 141   141 0.8713158 0.2444969 0.6901449 0.02071803 0.02823740 0.01396832
## 142   142 0.8713034 0.2445294 0.6901679 0.02069684 0.02819298 0.01408468
## 143   143 0.8713683 0.2444281 0.6901645 0.02060307 0.02802407 0.01401607
## 144   144 0.8713569 0.2444409 0.6902315 0.02056513 0.02818447 0.01396179
## 145   145 0.8713774 0.2444170 0.6902930 0.02054694 0.02828622 0.01386957
## 146   146 0.8712730 0.2445769 0.6902728 0.02044876 0.02807463 0.01375032
## 147   147 0.8712527 0.2446188 0.6901834 0.02050523 0.02801955 0.01384962
## 148   148 0.8710502 0.2449493 0.6900083 0.02047085 0.02795599 0.01381975
## 149   149 0.8711175 0.2448547 0.6901115 0.02055742 0.02817307 0.01379476
## 150   150 0.8711157 0.2448667 0.6901539 0.02067697 0.02827671 0.01387178
## 151   151 0.8710833 0.2449303 0.6901347 0.02072097 0.02829473 0.01391863
## 152   152 0.8711162 0.2448828 0.6901485 0.02075774 0.02835297 0.01392269
## 153   153 0.8711771 0.2447953 0.6902609 0.02086978 0.02843315 0.01397148
## 154   154 0.8712347 0.2447184 0.6902335 0.02091519 0.02848442 0.01412556
## 155   155 0.8711361 0.2448762 0.6901372 0.02093749 0.02842538 0.01415616
## 156   156 0.8712337 0.2447406 0.6901292 0.02108884 0.02847537 0.01421835
## 157   157 0.8711736 0.2448526 0.6900581 0.02105340 0.02850010 0.01409611
## 158   158 0.8711462 0.2448794 0.6901316 0.02100275 0.02838615 0.01421449
## 159   159 0.8712409 0.2447563 0.6901944 0.02112370 0.02860997 0.01424422
## 160   160 0.8712807 0.2446967 0.6901892 0.02120499 0.02877726 0.01425095
## 161   161 0.8711596 0.2448738 0.6900781 0.02119133 0.02875914 0.01428886
## 162   162 0.8712774 0.2446874 0.6901690 0.02116432 0.02872789 0.01428432
## 163   163 0.8711287 0.2449168 0.6900464 0.02123452 0.02876368 0.01437950
## 164   164 0.8710915 0.2449623 0.6900716 0.02118637 0.02877348 0.01436387
## 165   165 0.8711921 0.2448126 0.6902052 0.02125244 0.02886400 0.01437262
## 166   166 0.8711645 0.2448510 0.6902288 0.02124332 0.02881924 0.01430674
## 167   167 0.8712652 0.2447059 0.6903123 0.02132825 0.02892615 0.01434286
## 168   168 0.8712140 0.2447864 0.6902311 0.02127253 0.02885056 0.01423407
## 169   169 0.8711383 0.2449069 0.6901360 0.02130524 0.02909020 0.01423164
## 170   170 0.8711185 0.2449465 0.6900541 0.02135655 0.02917371 0.01428241
## 171   171 0.8711749 0.2448649 0.6901272 0.02133800 0.02929512 0.01418843
## 172   172 0.8711349 0.2449275 0.6900604 0.02132855 0.02948041 0.01408380
## 173   173 0.8711275 0.2449570 0.6900108 0.02133465 0.02960043 0.01408747
## 174   174 0.8711346 0.2449414 0.6900717 0.02124822 0.02958816 0.01399021
## 175   175 0.8711532 0.2449036 0.6901101 0.02119081 0.02942734 0.01395170
## 176   176 0.8711093 0.2449749 0.6900312 0.02112456 0.02933080 0.01392380
## 177   177 0.8710952 0.2449867 0.6900572 0.02106202 0.02927793 0.01389001
## 178   178 0.8710804 0.2450125 0.6900561 0.02113964 0.02942374 0.01395202
## 179   179 0.8710763 0.2450186 0.6900630 0.02119212 0.02947254 0.01396369
## 180   180 0.8710832 0.2450131 0.6900365 0.02123646 0.02942406 0.01398527
## 181   181 0.8710736 0.2450396 0.6900090 0.02113791 0.02930619 0.01390232
## 182   182 0.8710458 0.2450890 0.6899131 0.02109437 0.02929250 0.01385609
## 183   183 0.8710961 0.2450185 0.6899234 0.02112379 0.02929783 0.01387969
## 184   184 0.8710271 0.2451176 0.6898817 0.02103771 0.02919615 0.01384232
## 185   185 0.8710043 0.2451458 0.6898444 0.02103426 0.02911157 0.01377907
## 186   186 0.8710048 0.2451473 0.6898530 0.02103676 0.02912609 0.01376496
## 187   187 0.8709963 0.2451576 0.6898529 0.02108785 0.02924877 0.01377115
## 188   188 0.8709842 0.2451876 0.6899148 0.02111971 0.02927357 0.01389706
## 189   189 0.8709873 0.2451916 0.6899685 0.02109875 0.02922354 0.01395655
## 190   190 0.8710406 0.2451060 0.6899844 0.02109246 0.02924752 0.01401830
## 191   191 0.8709588 0.2452390 0.6899816 0.02109039 0.02916670 0.01400681
## 192   192 0.8709811 0.2451996 0.6899624 0.02107950 0.02912471 0.01398331
## 193   193 0.8709853 0.2451803 0.6899733 0.02103639 0.02904459 0.01395551
## 194   194 0.8709182 0.2452925 0.6898874 0.02110993 0.02912974 0.01402862
## 195   195 0.8709558 0.2452263 0.6899316 0.02113989 0.02907110 0.01404849
## 196   196 0.8709432 0.2452508 0.6898917 0.02111882 0.02903418 0.01403962
## 197   197 0.8709598 0.2452248 0.6898924 0.02111907 0.02904338 0.01403203
## 198   198 0.8709514 0.2452282 0.6899063 0.02117657 0.02909739 0.01408048
## 199   199 0.8709082 0.2452996 0.6898663 0.02114716 0.02903176 0.01406275
## 200   200 0.8709011 0.2453082 0.6898860 0.02110396 0.02898923 0.01401707
## 201   201 0.8709807 0.2451867 0.6899327 0.02110346 0.02900607 0.01405703
## 202   202 0.8708745 0.2453477 0.6898521 0.02107782 0.02899604 0.01402407
## 203   203 0.8708584 0.2453716 0.6898497 0.02105388 0.02894708 0.01398756
## 204   204 0.8708581 0.2453766 0.6898513 0.02105834 0.02890832 0.01399413
## 205   205 0.8708489 0.2454034 0.6898480 0.02109830 0.02899896 0.01403535
## 206   206 0.8708463 0.2454086 0.6898147 0.02109298 0.02900501 0.01405162
## 207   207 0.8708843 0.2453488 0.6898349 0.02108793 0.02894228 0.01407714
## 208   208 0.8708767 0.2453638 0.6898140 0.02106154 0.02888536 0.01407658
## 209   209 0.8708897 0.2453462 0.6898105 0.02106403 0.02890307 0.01404738
## 210   210 0.8708898 0.2453374 0.6898011 0.02101205 0.02885188 0.01403132
## 211   211 0.8709184 0.2452909 0.6898232 0.02100778 0.02883210 0.01400844
## 212   212 0.8709038 0.2453085 0.6897977 0.02097485 0.02878769 0.01396455
## 213   213 0.8709212 0.2452829 0.6898120 0.02099960 0.02874074 0.01397542
## 214   214 0.8709336 0.2452647 0.6898221 0.02095327 0.02868699 0.01393476
## 215   215 0.8709541 0.2452294 0.6898285 0.02093752 0.02866226 0.01393469
## 216   216 0.8709181 0.2452826 0.6898032 0.02091984 0.02862750 0.01391265
## 217   217 0.8709383 0.2452499 0.6898019 0.02090191 0.02857891 0.01389143
## 218   218 0.8709594 0.2452213 0.6898122 0.02092089 0.02859895 0.01389152
## 219   219 0.8709504 0.2452379 0.6898225 0.02095061 0.02860147 0.01392855
## 220   220 0.8709595 0.2452215 0.6898295 0.02091165 0.02857674 0.01389363
## 221   221 0.8709620 0.2452193 0.6898415 0.02091774 0.02861461 0.01390884
## 222   222 0.8709319 0.2452631 0.6898199 0.02092204 0.02862018 0.01392612
## 223   223 0.8709661 0.2452133 0.6898494 0.02091606 0.02863118 0.01390926
## 224   224 0.8709749 0.2451958 0.6898617 0.02087261 0.02860788 0.01387422
## 225   225 0.8709869 0.2451785 0.6898824 0.02088212 0.02864416 0.01388276
## 226   226 0.8709852 0.2451818 0.6898890 0.02087977 0.02866299 0.01387216
## 227   227 0.8709797 0.2451895 0.6898882 0.02086446 0.02866466 0.01385896
## 228   228 0.8709643 0.2452123 0.6898701 0.02087165 0.02869113 0.01385553
## 229   229 0.8709644 0.2452103 0.6898638 0.02085810 0.02867718 0.01384820
## 230   230 0.8709578 0.2452244 0.6898649 0.02087034 0.02869888 0.01386252
## 231   231 0.8709710 0.2452039 0.6898742 0.02086966 0.02867474 0.01386523
## 232   232 0.8709715 0.2452026 0.6898689 0.02086836 0.02866868 0.01385658
## 233   233 0.8709690 0.2452062 0.6898742 0.02086723 0.02865404 0.01385623
## 234   234 0.8709601 0.2452207 0.6898732 0.02087161 0.02865150 0.01385151
## 235   235 0.8709550 0.2452287 0.6898716 0.02086803 0.02864109 0.01384866
## 236   236 0.8709448 0.2452437 0.6898626 0.02086830 0.02862462 0.01385545
## 237   237 0.8709466 0.2452403 0.6898668 0.02087086 0.02862439 0.01386168
## 238   238 0.8709469 0.2452393 0.6898647 0.02086630 0.02861955 0.01386117
## 239   239 0.8709465 0.2452396 0.6898656 0.02086696 0.02862015 0.01386082
## 240   240 0.8709482 0.2452370 0.6898676 0.02086412 0.02861708 0.01385710
##    nvmax
## 13    13
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
## -3.832583e+00 -1.336998e-03  3.502871e-01  1.316519e-02  1.026206e-01 
##           x10           x11           x16           x17           x21 
##  3.532314e-02  6.547159e+06  2.676086e-02  3.808197e-02  3.363089e-03 
##        stat14        stat98       stat110      sqrt.x18 
## -2.807359e-02  1.008003e-01 -9.721343e-02  7.689960e-01

Test

# Score the CV-selected forward model on the hold-out set and draw
# prediction-limit plots. isTRUE() replaces '== TRUE' so an NA
# parameter cannot error the `if`.
if (isTRUE(algo.forward.caret)) {
  test.model(model.forward, data.test,
             method = "leapForward", subopt = NULL,
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##      Min.   1st Qu.    Median      Mean   3rd Qu.      Max. 
## -1.615548 -0.383649  0.009481 -0.010402  0.359098  1.565338 
## [1] "leapForward  Test MSE: 0.707385129532587"

Forward Selection with CV (w/ filtered train)

Train

# Cross-validated forward selection (caret "leapForward") on the FILTERED
# training set (data.train2). set.seed(1) reproduces the CV folds.
# isTRUE() replaces '== TRUE' so an NA parameter cannot error the `if`.
# NOTE(review): this reassigns 'model.forward' and 'id', clobbering the
# full-train caret fit from the earlier chunk (the non-caret path kept a
# separate 'model.forward2') — confirm this overwrite is intended.
if (isTRUE(algo.forward.caret)) {
  set.seed(1)
  returned = train.caret.glmselect(formula = formula,
                                   data = data.train2,
                                   method = "leapForward",
                                   feature.names = feature.names)
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 14 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD      MAESD
## 1       1 0.8409023 0.1577218 0.6879777 0.01268808 0.03072104 0.01129038
## 2       2 0.8033473 0.2319526 0.6608168 0.01175259 0.03593694 0.01051046
## 3       3 0.7837206 0.2693743 0.6423414 0.01806620 0.04565986 0.01381834
## 4       4 0.7609968 0.3105987 0.6196852 0.01993332 0.04660860 0.01470158
## 5       5 0.7484550 0.3326744 0.6098049 0.01972914 0.04767659 0.01512932
## 6       6 0.7445668 0.3396824 0.6073199 0.01970809 0.04865528 0.01514497
## 7       7 0.7443466 0.3399573 0.6072336 0.01890122 0.04736629 0.01403613
## 8       8 0.7424917 0.3430195 0.6063003 0.01804282 0.04515576 0.01382522
## 9       9 0.7401689 0.3468840 0.6048158 0.01731572 0.04337614 0.01408610
## 10     10 0.7365026 0.3532011 0.6026346 0.01616534 0.04154642 0.01295357
## 11     11 0.7376458 0.3512336 0.6032208 0.01601369 0.04073140 0.01253736
## 12     12 0.7380996 0.3503967 0.6034986 0.01642763 0.04172391 0.01255994
## 13     13 0.7375208 0.3514510 0.6030971 0.01701738 0.04144952 0.01306405
## 14     14 0.7363273 0.3535263 0.6023590 0.01659820 0.04066941 0.01275084
## 15     15 0.7363301 0.3534808 0.6018782 0.01553689 0.03820284 0.01210340
## 16     16 0.7366222 0.3530071 0.6022254 0.01480923 0.03657084 0.01189792
## 17     17 0.7368169 0.3526335 0.6025223 0.01472815 0.03541949 0.01199575
## 18     18 0.7363804 0.3533739 0.6021831 0.01378350 0.03436556 0.01122598
## 19     19 0.7373765 0.3516857 0.6027204 0.01397516 0.03415938 0.01164362
## 20     20 0.7373494 0.3517097 0.6024701 0.01386563 0.03336486 0.01206019
## 21     21 0.7372872 0.3518576 0.6025976 0.01408742 0.03349182 0.01227980
## 22     22 0.7369927 0.3524393 0.6019267 0.01364810 0.03301743 0.01192134
## 23     23 0.7367653 0.3529254 0.6016105 0.01457062 0.03384498 0.01290424
## 24     24 0.7373179 0.3520332 0.6020201 0.01431984 0.03381883 0.01270119
## 25     25 0.7372049 0.3523123 0.6019132 0.01440990 0.03406174 0.01270403
## 26     26 0.7373940 0.3519885 0.6019777 0.01413314 0.03327982 0.01275523
## 27     27 0.7375695 0.3517684 0.6020455 0.01428035 0.03361162 0.01273003
## 28     28 0.7377750 0.3514236 0.6020313 0.01457593 0.03423781 0.01271837
## 29     29 0.7378507 0.3513274 0.6022501 0.01512819 0.03441831 0.01320870
## 30     30 0.7376914 0.3516571 0.6023938 0.01488370 0.03420653 0.01322901
## 31     31 0.7378856 0.3513142 0.6024847 0.01433757 0.03334595 0.01300728
## 32     32 0.7379469 0.3511519 0.6025682 0.01442633 0.03329000 0.01335014
## 33     33 0.7378786 0.3513039 0.6023042 0.01450974 0.03341276 0.01341846
## 34     34 0.7376079 0.3517986 0.6022509 0.01501636 0.03388655 0.01359260
## 35     35 0.7378932 0.3513131 0.6025848 0.01498840 0.03467393 0.01371315
## 36     36 0.7380828 0.3510071 0.6025050 0.01514027 0.03474425 0.01366704
## 37     37 0.7381048 0.3510052 0.6023614 0.01483878 0.03383483 0.01331592
## 38     38 0.7378948 0.3513428 0.6023185 0.01460352 0.03346911 0.01328132
## 39     39 0.7374663 0.3520729 0.6020512 0.01430466 0.03349454 0.01263274
## 40     40 0.7372526 0.3524791 0.6018468 0.01458445 0.03361291 0.01254014
## 41     41 0.7377576 0.3516759 0.6021838 0.01488598 0.03346599 0.01302169
## 42     42 0.7377250 0.3517806 0.6019835 0.01530237 0.03346535 0.01323195
## 43     43 0.7382842 0.3508119 0.6024108 0.01519865 0.03247491 0.01346430
## 44     44 0.7378829 0.3514900 0.6022882 0.01531203 0.03279355 0.01333877
## 45     45 0.7374289 0.3522258 0.6020804 0.01507482 0.03263129 0.01296224
## 46     46 0.7376333 0.3519120 0.6023221 0.01489583 0.03187344 0.01300005
## 47     47 0.7378879 0.3515049 0.6024948 0.01540893 0.03201811 0.01329795
## 48     48 0.7376799 0.3518570 0.6026234 0.01489747 0.03167943 0.01287096
## 49     49 0.7378366 0.3516011 0.6025527 0.01527456 0.03153035 0.01305666
## 50     50 0.7377295 0.3518508 0.6026381 0.01560731 0.03181406 0.01342387
## 51     51 0.7378531 0.3517062 0.6028346 0.01604639 0.03294225 0.01371526
## 52     52 0.7374454 0.3523726 0.6024782 0.01582657 0.03276496 0.01305556
## 53     53 0.7376949 0.3519464 0.6028039 0.01605705 0.03284425 0.01346419
## 54     54 0.7374910 0.3523456 0.6025378 0.01626622 0.03295962 0.01401332
## 55     55 0.7376179 0.3521012 0.6026707 0.01631152 0.03299921 0.01431387
## 56     56 0.7377300 0.3519154 0.6028957 0.01644831 0.03348581 0.01444114
## 57     57 0.7378323 0.3517590 0.6030916 0.01667356 0.03360485 0.01452518
## 58     58 0.7381273 0.3512737 0.6031913 0.01685727 0.03365168 0.01456214
## 59     59 0.7384817 0.3506643 0.6035779 0.01666756 0.03356277 0.01442560
## 60     60 0.7386168 0.3504561 0.6035504 0.01668201 0.03364547 0.01449175
## 61     61 0.7385164 0.3507006 0.6033790 0.01666424 0.03359198 0.01436613
## 62     62 0.7384053 0.3509024 0.6031182 0.01683266 0.03364108 0.01444710
## 63     63 0.7386611 0.3504853 0.6031194 0.01694844 0.03391777 0.01457449
## 64     64 0.7388433 0.3501824 0.6033354 0.01678706 0.03399561 0.01414916
## 65     65 0.7394976 0.3491328 0.6037521 0.01659730 0.03379036 0.01415415
## 66     66 0.7396237 0.3489432 0.6039282 0.01672796 0.03360473 0.01426390
## 67     67 0.7393918 0.3493999 0.6038565 0.01698676 0.03380225 0.01458520
## 68     68 0.7391882 0.3497452 0.6035051 0.01678115 0.03335541 0.01423347
## 69     69 0.7393421 0.3494831 0.6035126 0.01670547 0.03366713 0.01412898
## 70     70 0.7390776 0.3499068 0.6032154 0.01660027 0.03379206 0.01431153
## 71     71 0.7390320 0.3500384 0.6032087 0.01658264 0.03406738 0.01427845
## 72     72 0.7390704 0.3499827 0.6033204 0.01647361 0.03390572 0.01406481
## 73     73 0.7393556 0.3495324 0.6037707 0.01637080 0.03420723 0.01410007
## 74     74 0.7393945 0.3494321 0.6038196 0.01623161 0.03364671 0.01385869
## 75     75 0.7394061 0.3494062 0.6037594 0.01588029 0.03246240 0.01371690
## 76     76 0.7394101 0.3494216 0.6036882 0.01602458 0.03315104 0.01377992
## 77     77 0.7392948 0.3495968 0.6037023 0.01595436 0.03318927 0.01391237
## 78     78 0.7391376 0.3498437 0.6035905 0.01551796 0.03250127 0.01342401
## 79     79 0.7391141 0.3499324 0.6035568 0.01546985 0.03223502 0.01335025
## 80     80 0.7391983 0.3498254 0.6035858 0.01557878 0.03251788 0.01342815
## 81     81 0.7394632 0.3493566 0.6038344 0.01562631 0.03244089 0.01335892
## 82     82 0.7396552 0.3490592 0.6040567 0.01551413 0.03236259 0.01361742
## 83     83 0.7397361 0.3489413 0.6040484 0.01538773 0.03227724 0.01387407
## 84     84 0.7400190 0.3484906 0.6041007 0.01534481 0.03244851 0.01377688
## 85     85 0.7399730 0.3485427 0.6040688 0.01537103 0.03253281 0.01370340
## 86     86 0.7402330 0.3481140 0.6043211 0.01509720 0.03194662 0.01347058
## 87     87 0.7399468 0.3485819 0.6041069 0.01513580 0.03204793 0.01359579
## 88     88 0.7400412 0.3484259 0.6042029 0.01531371 0.03242480 0.01370866
## 89     89 0.7400726 0.3483668 0.6042542 0.01501019 0.03213114 0.01345348
## 90     90 0.7399350 0.3485624 0.6042765 0.01495223 0.03191414 0.01328022
## 91     91 0.7400486 0.3483833 0.6042815 0.01487719 0.03182798 0.01320196
## 92     92 0.7398855 0.3487168 0.6040968 0.01482213 0.03164555 0.01318859
## 93     93 0.7402665 0.3481175 0.6045246 0.01479356 0.03121699 0.01314971
## 94     94 0.7403956 0.3479138 0.6047259 0.01480305 0.03151144 0.01323469
## 95     95 0.7404614 0.3478059 0.6048156 0.01488469 0.03182473 0.01317060
## 96     96 0.7405442 0.3476607 0.6049880 0.01509268 0.03189307 0.01317486
## 97     97 0.7404041 0.3478886 0.6048793 0.01518425 0.03227635 0.01322625
## 98     98 0.7403376 0.3480042 0.6048031 0.01532138 0.03230502 0.01328243
## 99     99 0.7401827 0.3482994 0.6046496 0.01486137 0.03204957 0.01289540
## 100   100 0.7400857 0.3484525 0.6045616 0.01481207 0.03191564 0.01295719
## 101   101 0.7401926 0.3482645 0.6047691 0.01495039 0.03212990 0.01296308
## 102   102 0.7399680 0.3486326 0.6046513 0.01473494 0.03167372 0.01270246
## 103   103 0.7401335 0.3483619 0.6047827 0.01424690 0.03120442 0.01232457
## 104   104 0.7400752 0.3484849 0.6046366 0.01428013 0.03120338 0.01232267
## 105   105 0.7401760 0.3483049 0.6046925 0.01426414 0.03096810 0.01230321
## 106   106 0.7403650 0.3479656 0.6049233 0.01418876 0.03112409 0.01216154
## 107   107 0.7406721 0.3474872 0.6052454 0.01427115 0.03123553 0.01229226
## 108   108 0.7404726 0.3478144 0.6051806 0.01449343 0.03136863 0.01240524
## 109   109 0.7404297 0.3478805 0.6050557 0.01426130 0.03129106 0.01212624
## 110   110 0.7404744 0.3478154 0.6051349 0.01415329 0.03126182 0.01199968
## 111   111 0.7405905 0.3476459 0.6052344 0.01447955 0.03173280 0.01224684
## 112   112 0.7406641 0.3475541 0.6051837 0.01438562 0.03164378 0.01217678
## 113   113 0.7407282 0.3474577 0.6052902 0.01434063 0.03124113 0.01230970
## 114   114 0.7406321 0.3476200 0.6052920 0.01426848 0.03152397 0.01238254
## 115   115 0.7403797 0.3480315 0.6050552 0.01412593 0.03142046 0.01223857
## 116   116 0.7404286 0.3479250 0.6049863 0.01390161 0.03132668 0.01215123
## 117   117 0.7402902 0.3481511 0.6047545 0.01378987 0.03102504 0.01211245
## 118   118 0.7404220 0.3479552 0.6049435 0.01388729 0.03123744 0.01226338
## 119   119 0.7404244 0.3479436 0.6049393 0.01406794 0.03119171 0.01244603
## 120   120 0.7403340 0.3480645 0.6048932 0.01399137 0.03113171 0.01248198
## 121   121 0.7403054 0.3481067 0.6048640 0.01420235 0.03165626 0.01278894
## 122   122 0.7402291 0.3482787 0.6048668 0.01433633 0.03176540 0.01278824
## 123   123 0.7401002 0.3484981 0.6047184 0.01464058 0.03202343 0.01314665
## 124   124 0.7400681 0.3485446 0.6047511 0.01452832 0.03174295 0.01318153
## 125   125 0.7403252 0.3481154 0.6048576 0.01456212 0.03172349 0.01310864
## 126   126 0.7404818 0.3478893 0.6049314 0.01449742 0.03171828 0.01309146
## 127   127 0.7406465 0.3476243 0.6049670 0.01434997 0.03152348 0.01293378
## 128   128 0.7406775 0.3476046 0.6050013 0.01435298 0.03153023 0.01295351
## 129   129 0.7407795 0.3474505 0.6050926 0.01455418 0.03131525 0.01311916
## 130   130 0.7407557 0.3474788 0.6051056 0.01461832 0.03141698 0.01317153
## 131   131 0.7408605 0.3473356 0.6052207 0.01478594 0.03191888 0.01327466
## 132   132 0.7411277 0.3468813 0.6054199 0.01481102 0.03183686 0.01331079
## 133   133 0.7413932 0.3464442 0.6057261 0.01463699 0.03154826 0.01318306
## 134   134 0.7416185 0.3460759 0.6058591 0.01456647 0.03158432 0.01307182
## 135   135 0.7415010 0.3462904 0.6056988 0.01453559 0.03144616 0.01298026
## 136   136 0.7415211 0.3462449 0.6056764 0.01446965 0.03140386 0.01279437
## 137   137 0.7414934 0.3463065 0.6057059 0.01455253 0.03138836 0.01293713
## 138   138 0.7415610 0.3462287 0.6056469 0.01473020 0.03165008 0.01310739
## 139   139 0.7415924 0.3461772 0.6056406 0.01480646 0.03143874 0.01309670
## 140   140 0.7416516 0.3460831 0.6056587 0.01493133 0.03149358 0.01322962
## 141   141 0.7417051 0.3460161 0.6056058 0.01508813 0.03147926 0.01335005
## 142   142 0.7416298 0.3461343 0.6054965 0.01514457 0.03141511 0.01337996
## 143   143 0.7418121 0.3458547 0.6056648 0.01523791 0.03132653 0.01349497
## 144   144 0.7420042 0.3455411 0.6058228 0.01522371 0.03129664 0.01339135
## 145   145 0.7420012 0.3455299 0.6057338 0.01525492 0.03149603 0.01341767
## 146   146 0.7421049 0.3453567 0.6058472 0.01533352 0.03167308 0.01353283
## 147   147 0.7421999 0.3451971 0.6059699 0.01559763 0.03175736 0.01375951
## 148   148 0.7422219 0.3451547 0.6059013 0.01559600 0.03160652 0.01373256
## 149   149 0.7421719 0.3452392 0.6058335 0.01556997 0.03170939 0.01371660
## 150   150 0.7423998 0.3448690 0.6060640 0.01548940 0.03149813 0.01367436
## 151   151 0.7425577 0.3446069 0.6061831 0.01526399 0.03134183 0.01347205
## 152   152 0.7426242 0.3444803 0.6062175 0.01519254 0.03130991 0.01352158
## 153   153 0.7426153 0.3444849 0.6061703 0.01505661 0.03116448 0.01341032
## 154   154 0.7425889 0.3445279 0.6060987 0.01507235 0.03119754 0.01340194
## 155   155 0.7426537 0.3444161 0.6060577 0.01503211 0.03113764 0.01336246
## 156   156 0.7428219 0.3441394 0.6061686 0.01502309 0.03113957 0.01331233
## 157   157 0.7428600 0.3440786 0.6061170 0.01506897 0.03120300 0.01334845
## 158   158 0.7428506 0.3441142 0.6060850 0.01489865 0.03106978 0.01322327
## 159   159 0.7428709 0.3440968 0.6060464 0.01467978 0.03084769 0.01307693
## 160   160 0.7429358 0.3439842 0.6060601 0.01474522 0.03088552 0.01317890
## 161   161 0.7430546 0.3437944 0.6062223 0.01477976 0.03107018 0.01322134
## 162   162 0.7430585 0.3438026 0.6062239 0.01491060 0.03126730 0.01341688
## 163   163 0.7429418 0.3440144 0.6060910 0.01489539 0.03121015 0.01336188
## 164   164 0.7429847 0.3439597 0.6060606 0.01498762 0.03129350 0.01341550
## 165   165 0.7429476 0.3440354 0.6059807 0.01499182 0.03138948 0.01341150
## 166   166 0.7429303 0.3440589 0.6059932 0.01499408 0.03129088 0.01338146
## 167   167 0.7429537 0.3440297 0.6060139 0.01511188 0.03154950 0.01349461
## 168   168 0.7430168 0.3439383 0.6060148 0.01516052 0.03163326 0.01346847
## 169   169 0.7431068 0.3438150 0.6060803 0.01527207 0.03168420 0.01356663
## 170   170 0.7431074 0.3438231 0.6060972 0.01520732 0.03161898 0.01354056
## 171   171 0.7431283 0.3437906 0.6061215 0.01529601 0.03182491 0.01357445
## 172   172 0.7431369 0.3437854 0.6060850 0.01522843 0.03162933 0.01353545
## 173   173 0.7431679 0.3437390 0.6061060 0.01522405 0.03161282 0.01359088
## 174   174 0.7431786 0.3437256 0.6060851 0.01524417 0.03167552 0.01361470
## 175   175 0.7432332 0.3436323 0.6060802 0.01514649 0.03161303 0.01348709
## 176   176 0.7431701 0.3437210 0.6060365 0.01509607 0.03152481 0.01338757
## 177   177 0.7432192 0.3436462 0.6060632 0.01510626 0.03147450 0.01342325
## 178   178 0.7432090 0.3436646 0.6060605 0.01507076 0.03142510 0.01333675
## 179   179 0.7433050 0.3435124 0.6061500 0.01507685 0.03137569 0.01333094
## 180   180 0.7433533 0.3434386 0.6062052 0.01509958 0.03142680 0.01336219
## 181   181 0.7433346 0.3434778 0.6061942 0.01526759 0.03162916 0.01349042
## 182   182 0.7434409 0.3433029 0.6062563 0.01528984 0.03159930 0.01349206
## 183   183 0.7434708 0.3432642 0.6063523 0.01539224 0.03166880 0.01356686
## 184   184 0.7435729 0.3430991 0.6064001 0.01546068 0.03177531 0.01361543
## 185   185 0.7436355 0.3430069 0.6064958 0.01550938 0.03179894 0.01362611
## 186   186 0.7436350 0.3430028 0.6064829 0.01553884 0.03179766 0.01361664
## 187   187 0.7435937 0.3430723 0.6064326 0.01560876 0.03187406 0.01366571
## 188   188 0.7436434 0.3429871 0.6064586 0.01556121 0.03193682 0.01358963
## 189   189 0.7436914 0.3429218 0.6065077 0.01553542 0.03197761 0.01355191
## 190   190 0.7436667 0.3429584 0.6065156 0.01555851 0.03199734 0.01352159
## 191   191 0.7436313 0.3429972 0.6064712 0.01538935 0.03188922 0.01337729
## 192   192 0.7436527 0.3429690 0.6064792 0.01537903 0.03185653 0.01337860
## 193   193 0.7436626 0.3429559 0.6064995 0.01534510 0.03167674 0.01331683
## 194   194 0.7436417 0.3429944 0.6064703 0.01532142 0.03169376 0.01329805
## 195   195 0.7435992 0.3430573 0.6064121 0.01528537 0.03158903 0.01330234
## 196   196 0.7435270 0.3431734 0.6063406 0.01521672 0.03152423 0.01329800
## 197   197 0.7435512 0.3431413 0.6063894 0.01519308 0.03149038 0.01325151
## 198   198 0.7435452 0.3431553 0.6063952 0.01524788 0.03164432 0.01330733
## 199   199 0.7435473 0.3431585 0.6063937 0.01525021 0.03157697 0.01329670
## 200   200 0.7435602 0.3431441 0.6063728 0.01528041 0.03165486 0.01332133
## 201   201 0.7436069 0.3430679 0.6063970 0.01528882 0.03168416 0.01331720
## 202   202 0.7436039 0.3430762 0.6063747 0.01537728 0.03181580 0.01341025
## 203   203 0.7436129 0.3430644 0.6063610 0.01532804 0.03179156 0.01336434
## 204   204 0.7436245 0.3430470 0.6064024 0.01538031 0.03181569 0.01343854
## 205   205 0.7436023 0.3430820 0.6063737 0.01535258 0.03186156 0.01338926
## 206   206 0.7436521 0.3430004 0.6064292 0.01535949 0.03193848 0.01339256
## 207   207 0.7436801 0.3429593 0.6064461 0.01537046 0.03196902 0.01340616
## 208   208 0.7436879 0.3429550 0.6064336 0.01540096 0.03200618 0.01341149
## 209   209 0.7437029 0.3429358 0.6064374 0.01540072 0.03193734 0.01341080
## 210   210 0.7437139 0.3429168 0.6064476 0.01535214 0.03190807 0.01339077
## 211   211 0.7437118 0.3429198 0.6064465 0.01538974 0.03192214 0.01340304
## 212   212 0.7437175 0.3429070 0.6064385 0.01537155 0.03190504 0.01337462
## 213   213 0.7437465 0.3428555 0.6064520 0.01537442 0.03189626 0.01338156
## 214   214 0.7437286 0.3428859 0.6064303 0.01537480 0.03188941 0.01335691
## 215   215 0.7436922 0.3429395 0.6063816 0.01534676 0.03180363 0.01332834
## 216   216 0.7436732 0.3429675 0.6063696 0.01530897 0.03176279 0.01330174
## 217   217 0.7436631 0.3429866 0.6063876 0.01530102 0.03177383 0.01329785
## 218   218 0.7436976 0.3429273 0.6064206 0.01531310 0.03182881 0.01331794
## 219   219 0.7437012 0.3429249 0.6064269 0.01531953 0.03183132 0.01332384
## 220   220 0.7436988 0.3429267 0.6064309 0.01531341 0.03185490 0.01330329
## 221   221 0.7437093 0.3429110 0.6064433 0.01535698 0.03193747 0.01335169
## 222   222 0.7436958 0.3429315 0.6064429 0.01536589 0.03196301 0.01337918
## 223   223 0.7436896 0.3429430 0.6064373 0.01537708 0.03199155 0.01339421
## 224   224 0.7436885 0.3429466 0.6064327 0.01536950 0.03198249 0.01338616
## 225   225 0.7436762 0.3429667 0.6064264 0.01534809 0.03197904 0.01335714
## 226   226 0.7436865 0.3429481 0.6064388 0.01536621 0.03202983 0.01335675
## 227   227 0.7437125 0.3429066 0.6064591 0.01536721 0.03201308 0.01336770
## 228   228 0.7437030 0.3429207 0.6064592 0.01535874 0.03200725 0.01336021
## 229   229 0.7437160 0.3428982 0.6064722 0.01536325 0.03199914 0.01336798
## 230   230 0.7437152 0.3428996 0.6064765 0.01536388 0.03201147 0.01336086
## 231   231 0.7437020 0.3429207 0.6064602 0.01536391 0.03201815 0.01335912
## 232   232 0.7437026 0.3429211 0.6064543 0.01537004 0.03202617 0.01334967
## 233   233 0.7437095 0.3429094 0.6064650 0.01538046 0.03202637 0.01336097
## 234   234 0.7437014 0.3429225 0.6064605 0.01537147 0.03201019 0.01335536
## 235   235 0.7437075 0.3429118 0.6064675 0.01537769 0.03201645 0.01336572
## 236   236 0.7437075 0.3429119 0.6064647 0.01537818 0.03201369 0.01336517
## 237   237 0.7437101 0.3429086 0.6064644 0.01538230 0.03202068 0.01336713
## 238   238 0.7437119 0.3429055 0.6064625 0.01538549 0.03202128 0.01336905
## 239   239 0.7437121 0.3429051 0.6064628 0.01538583 0.03202209 0.01337002
## 240   240 0.7437119 0.3429053 0.6064635 0.01538568 0.03202256 0.01336963
##    nvmax
## 14    14
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
## -4.004464e+00 -1.635465e-03  3.772266e-01  1.303228e-02  1.049647e-01 
##           x10           x11           x16           x17           x21 
##  4.298559e-02  6.272044e+06  2.657518e-02  4.181081e-02  3.752106e-03 
##        stat14        stat41        stat98       stat110      sqrt.x18 
## -3.246523e-02 -1.862469e-02  1.066125e-01 -1.001554e-01  7.964291e-01

Test

# Evaluate the CV-selected forward-selection model on the hold-out test set.
# `model.forward` and `id` come from the preceding train.caret.glmselect() call.
if (algo.forward.caret) {  # flag is logical; comparing `== TRUE` is redundant
  test.model(model.forward, data.test
             ,method = 'leapForward',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             ,draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.75656 -0.45584 -0.04926 -0.06832  0.31879  1.61618 
## [1] "leapForward  Test MSE: 0.71219103708358"

Backward Elimination

Train

# Train a backward-elimination model with stats::step() starting from the
# full model. Prints the summary, reports wall-clock time, and draws
# diagnostic plots. Side effect: defines `model.backward` in the workspace.
if (algo.backward) {
  # NOTE: backward elimination over the full model can take a long time.
  t1 <- Sys.time()

  model.backward <- step(model.full, data = data.train, direction = "backward", trace = 0)
  print(summary(model.backward))
  # saveRDS(model.backward, file = "model_backward.rds")  # was model.forward — wrong object

  t2 <- Sys.time()
  # format() keeps the difftime units (secs/mins); bare paste() would drop them
  # and print an ambiguous number.
  print(paste0("Time taken for Backward Elimination: ", format(t2 - t1)))

  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the backward-elimination model on the hold-out test set.
if (algo.backward) {
  # BUG FIX: was `model.backard` (typo), which raised
  # "object 'model.backard' not found" whenever this chunk ran.
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Cross-validated backward elimination (leapBackward) on the full training
# set via the caret wrapper. Stores the fitted model and the selected-feature
# ids in the workspace for the Test chunk below.
if (algo.backward.caret) {
  set.seed(1)  # reproducible CV folds
  fit <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapBackward",
    feature.names = feature.names
  )
  model.backward <- fit$model
  id <- fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 13 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD      MAESD
## 1       1 0.9365643 0.1236737 0.7536160 0.01668298 0.01993896 0.01279423
## 2       2 0.9079494 0.1770387 0.7298664 0.01750665 0.02892992 0.01357292
## 3       3 0.8944193 0.2008249 0.7155952 0.01901631 0.02919065 0.01320105
## 4       4 0.8742691 0.2363913 0.6937420 0.02185141 0.03097169 0.01484363
## 5       5 0.8638186 0.2544505 0.6856338 0.02338283 0.03597771 0.01555964
## 6       6 0.8614669 0.2584423 0.6838031 0.02330853 0.03655409 0.01630994
## 7       7 0.8616264 0.2581201 0.6838827 0.02252992 0.03576400 0.01543925
## 8       8 0.8606372 0.2598909 0.6832436 0.02346111 0.03617819 0.01574605
## 9       9 0.8589201 0.2629713 0.6817973 0.02303138 0.03595457 0.01457108
## 10     10 0.8560569 0.2677599 0.6802123 0.02220196 0.03458445 0.01366388
## 11     11 0.8566254 0.2668271 0.6804815 0.02260781 0.03462453 0.01470736
## 12     12 0.8560859 0.2677589 0.6800820 0.02254281 0.03351252 0.01463182
## 13     13 0.8555747 0.2686228 0.6800973 0.02206305 0.03335563 0.01510989
## 14     14 0.8560136 0.2678391 0.6807736 0.02126001 0.03146312 0.01439859
## 15     15 0.8563294 0.2673560 0.6807459 0.02133984 0.03211955 0.01446735
## 16     16 0.8570050 0.2661981 0.6809689 0.02128136 0.03145458 0.01464079
## 17     17 0.8578058 0.2648974 0.6805910 0.02202643 0.03156827 0.01527678
## 18     18 0.8587239 0.2633832 0.6814539 0.02185671 0.03192988 0.01480811
## 19     19 0.8589868 0.2629528 0.6816334 0.02177558 0.03061139 0.01507559
## 20     20 0.8594587 0.2621429 0.6820007 0.02169319 0.03064491 0.01488072
## 21     21 0.8592165 0.2625859 0.6817450 0.02122121 0.03049068 0.01430862
## 22     22 0.8596630 0.2618405 0.6820149 0.02058561 0.02990860 0.01377926
## 23     23 0.8599657 0.2613521 0.6822373 0.01991861 0.02926440 0.01327024
## 24     24 0.8600141 0.2613129 0.6820905 0.02005046 0.02918015 0.01359636
## 25     25 0.8596311 0.2619713 0.6818048 0.01974444 0.02921075 0.01345087
## 26     26 0.8596415 0.2619258 0.6820984 0.01988885 0.02932988 0.01349023
## 27     27 0.8601784 0.2610319 0.6828302 0.01960764 0.02864057 0.01344128
## 28     28 0.8604082 0.2606614 0.6831892 0.01936522 0.02824562 0.01328344
## 29     29 0.8605567 0.2604734 0.6834480 0.01942296 0.02899682 0.01300198
## 30     30 0.8606018 0.2604001 0.6832609 0.01898791 0.02857932 0.01277832
## 31     31 0.8612467 0.2593023 0.6839339 0.01930531 0.02858065 0.01320908
## 32     32 0.8614199 0.2590512 0.6837813 0.01910704 0.02868758 0.01322392
## 33     33 0.8616403 0.2586923 0.6839795 0.01931191 0.02926995 0.01371658
## 34     34 0.8618072 0.2584278 0.6840556 0.01911459 0.02897868 0.01353975
## 35     35 0.8620435 0.2581013 0.6843567 0.01928940 0.02864242 0.01352513
## 36     36 0.8622495 0.2577702 0.6843889 0.01940811 0.02880614 0.01344987
## 37     37 0.8624435 0.2574835 0.6844280 0.01968755 0.02931613 0.01353697
## 38     38 0.8623582 0.2576082 0.6843283 0.01965300 0.02936515 0.01368578
## 39     39 0.8624024 0.2575331 0.6843141 0.01959292 0.02953076 0.01387019
## 40     40 0.8625883 0.2572104 0.6845694 0.01946659 0.02916949 0.01391406
## 41     41 0.8631788 0.2562752 0.6850405 0.01917295 0.02919017 0.01366859
## 42     42 0.8634754 0.2558213 0.6852612 0.01910378 0.02959159 0.01363498
## 43     43 0.8636613 0.2555304 0.6852846 0.01920377 0.02970908 0.01368077
## 44     44 0.8637282 0.2555082 0.6852873 0.01952407 0.02981430 0.01390162
## 45     45 0.8636906 0.2555941 0.6848678 0.01986005 0.02986920 0.01432617
## 46     46 0.8639723 0.2551614 0.6850879 0.01958558 0.03023105 0.01435965
## 47     47 0.8641834 0.2548092 0.6851931 0.01959379 0.02954692 0.01427494
## 48     48 0.8644121 0.2544614 0.6853580 0.01965992 0.02947512 0.01397826
## 49     49 0.8643245 0.2546313 0.6851708 0.01983891 0.02931367 0.01409606
## 50     50 0.8643579 0.2546082 0.6849973 0.01991314 0.02917845 0.01413896
## 51     51 0.8646171 0.2542466 0.6850583 0.01987016 0.02925354 0.01413039
## 52     52 0.8649418 0.2537512 0.6852799 0.01990932 0.02914966 0.01418377
## 53     53 0.8651187 0.2534767 0.6854653 0.01999842 0.02974757 0.01424932
## 54     54 0.8653182 0.2531340 0.6853590 0.01970101 0.02933692 0.01413195
## 55     55 0.8651745 0.2533593 0.6853175 0.01976652 0.02917789 0.01427693
## 56     56 0.8651066 0.2535013 0.6850555 0.01956409 0.02945799 0.01415917
## 57     57 0.8650266 0.2536217 0.6849551 0.01943408 0.02922926 0.01426336
## 58     58 0.8652509 0.2533027 0.6851302 0.01964316 0.02977978 0.01434250
## 59     59 0.8655877 0.2528090 0.6852290 0.01971564 0.02995004 0.01443446
## 60     60 0.8651411 0.2535738 0.6850008 0.02007599 0.03061746 0.01474648
## 61     61 0.8651878 0.2535046 0.6847487 0.02009185 0.03088458 0.01480147
## 62     62 0.8651009 0.2536122 0.6845954 0.01985088 0.03037240 0.01447021
## 63     63 0.8652732 0.2533212 0.6846163 0.01985302 0.02975371 0.01418246
## 64     64 0.8655743 0.2528848 0.6850243 0.02009465 0.03003344 0.01404808
## 65     65 0.8657058 0.2527027 0.6851630 0.02009702 0.02984747 0.01414860
## 66     66 0.8660804 0.2521008 0.6854923 0.01984088 0.02935869 0.01387186
## 67     67 0.8663039 0.2517924 0.6855992 0.01997432 0.02954874 0.01395594
## 68     68 0.8664655 0.2515667 0.6857538 0.01993123 0.02956265 0.01394807
## 69     69 0.8667758 0.2510711 0.6860430 0.01977697 0.02898650 0.01371124
## 70     70 0.8669990 0.2507069 0.6862663 0.01960530 0.02887835 0.01360332
## 71     71 0.8669769 0.2507896 0.6862602 0.02021346 0.02927246 0.01395888
## 72     72 0.8669798 0.2507606 0.6863925 0.02016520 0.02864704 0.01398512
## 73     73 0.8671082 0.2505710 0.6864267 0.01990594 0.02859050 0.01384779
## 74     74 0.8670545 0.2506987 0.6862886 0.01982708 0.02809689 0.01392881
## 75     75 0.8671834 0.2505173 0.6863800 0.01994594 0.02787082 0.01411895
## 76     76 0.8673139 0.2503441 0.6865654 0.02032950 0.02841831 0.01433087
## 77     77 0.8671478 0.2505929 0.6865048 0.02031057 0.02821585 0.01415583
## 78     78 0.8672619 0.2504626 0.6866454 0.02057055 0.02836735 0.01425723
## 79     79 0.8672697 0.2504675 0.6866593 0.02048630 0.02831568 0.01412148
## 80     80 0.8673461 0.2503354 0.6866620 0.02033219 0.02814677 0.01415362
## 81     81 0.8676538 0.2498809 0.6869297 0.02054122 0.02816786 0.01429340
## 82     82 0.8677684 0.2496864 0.6872315 0.02050042 0.02833130 0.01412270
## 83     83 0.8680234 0.2493009 0.6875237 0.02048851 0.02847641 0.01402754
## 84     84 0.8681408 0.2491012 0.6877016 0.02045192 0.02870687 0.01401450
## 85     85 0.8684249 0.2486827 0.6878645 0.02036558 0.02858416 0.01385894
## 86     86 0.8684295 0.2486964 0.6878914 0.02031005 0.02831252 0.01403434
## 87     87 0.8684231 0.2487130 0.6877863 0.02037673 0.02843159 0.01392244
## 88     88 0.8687012 0.2483006 0.6879341 0.02067387 0.02892182 0.01403264
## 89     89 0.8687366 0.2482522 0.6880747 0.02090900 0.02892512 0.01424256
## 90     90 0.8686152 0.2484404 0.6878444 0.02092654 0.02893318 0.01419869
## 91     91 0.8687082 0.2482928 0.6879710 0.02090623 0.02885509 0.01416358
## 92     92 0.8688310 0.2480838 0.6880724 0.02097849 0.02930971 0.01412548
## 93     93 0.8687434 0.2482475 0.6881554 0.02088629 0.02940647 0.01413087
## 94     94 0.8689641 0.2479182 0.6883991 0.02084887 0.02952348 0.01406476
## 95     95 0.8691168 0.2476637 0.6884143 0.02084641 0.02946343 0.01410450
## 96     96 0.8691215 0.2476573 0.6883300 0.02090486 0.02987236 0.01412492
## 97     97 0.8692286 0.2474543 0.6884753 0.02080377 0.02935685 0.01433756
## 98     98 0.8692918 0.2473472 0.6884991 0.02065578 0.02913221 0.01441377
## 99     99 0.8693945 0.2471866 0.6885239 0.02057941 0.02871775 0.01447196
## 100   100 0.8694070 0.2471820 0.6886260 0.02060200 0.02875679 0.01455577
## 101   101 0.8695626 0.2469518 0.6887808 0.02071882 0.02901436 0.01470188
## 102   102 0.8694424 0.2471672 0.6887476 0.02089387 0.02938671 0.01484390
## 103   103 0.8694245 0.2471735 0.6887179 0.02074125 0.02901172 0.01468136
## 104   104 0.8697781 0.2466338 0.6890031 0.02083421 0.02923737 0.01456960
## 105   105 0.8697800 0.2466239 0.6889986 0.02083091 0.02932740 0.01454599
## 106   106 0.8700353 0.2462445 0.6891863 0.02104588 0.02952956 0.01462267
## 107   107 0.8701177 0.2460971 0.6891338 0.02111483 0.02951981 0.01459164
## 108   108 0.8700693 0.2462098 0.6891640 0.02115808 0.02973450 0.01458971
## 109   109 0.8702427 0.2459299 0.6893752 0.02111730 0.02964118 0.01458730
## 110   110 0.8702009 0.2459816 0.6894865 0.02104191 0.02930547 0.01452487
## 111   111 0.8702717 0.2458818 0.6895196 0.02099416 0.02921412 0.01452344
## 112   112 0.8702878 0.2458833 0.6895516 0.02100260 0.02931748 0.01462306
## 113   113 0.8704051 0.2456967 0.6895528 0.02089086 0.02883122 0.01466771
## 114   114 0.8704660 0.2456157 0.6896848 0.02085868 0.02873692 0.01476646
## 115   115 0.8705691 0.2454582 0.6897086 0.02080466 0.02898123 0.01469216
## 116   116 0.8704245 0.2456811 0.6897001 0.02090107 0.02898495 0.01477699
## 117   117 0.8705337 0.2455288 0.6896877 0.02090064 0.02914250 0.01467981
## 118   118 0.8705972 0.2454513 0.6897489 0.02084018 0.02906281 0.01461465
## 119   119 0.8706015 0.2454685 0.6897421 0.02093643 0.02948109 0.01450446
## 120   120 0.8709149 0.2449906 0.6898817 0.02091743 0.02955269 0.01443893
## 121   121 0.8709436 0.2449621 0.6899169 0.02079003 0.02946770 0.01437891
## 122   122 0.8708749 0.2450743 0.6899414 0.02080614 0.02947978 0.01438911
## 123   123 0.8710680 0.2447978 0.6900101 0.02089320 0.02952087 0.01445114
## 124   124 0.8712810 0.2444629 0.6902011 0.02080993 0.02941678 0.01436672
## 125   125 0.8714417 0.2442130 0.6903130 0.02098838 0.02966022 0.01445358
## 126   126 0.8714014 0.2442779 0.6902024 0.02081466 0.02928450 0.01427327
## 127   127 0.8712901 0.2444463 0.6900936 0.02069467 0.02909246 0.01416436
## 128   128 0.8711833 0.2446206 0.6899250 0.02071078 0.02898921 0.01409420
## 129   129 0.8711714 0.2446637 0.6900166 0.02081552 0.02916502 0.01406494
## 130   130 0.8711050 0.2447754 0.6899504 0.02081981 0.02910637 0.01409353
## 131   131 0.8712194 0.2446082 0.6900343 0.02090774 0.02931705 0.01422291
## 132   132 0.8711530 0.2447067 0.6900396 0.02085214 0.02918593 0.01430432
## 133   133 0.8711106 0.2447817 0.6899654 0.02099022 0.02932377 0.01426931
## 134   134 0.8711977 0.2446529 0.6899804 0.02083924 0.02899918 0.01423720
## 135   135 0.8713043 0.2445005 0.6900692 0.02057948 0.02855585 0.01402916
## 136   136 0.8712761 0.2445444 0.6900407 0.02054929 0.02833556 0.01408714
## 137   137 0.8712474 0.2445959 0.6900202 0.02067464 0.02858343 0.01403636
## 138   138 0.8711912 0.2446927 0.6898919 0.02075832 0.02843381 0.01418100
## 139   139 0.8713295 0.2444960 0.6900917 0.02064746 0.02816696 0.01407152
## 140   140 0.8712383 0.2446463 0.6900928 0.02074840 0.02833721 0.01403621
## 141   141 0.8713070 0.2445447 0.6900811 0.02069215 0.02809597 0.01395169
## 142   142 0.8712645 0.2446192 0.6899888 0.02065677 0.02802275 0.01398336
## 143   143 0.8713356 0.2444983 0.6899969 0.02059458 0.02810271 0.01393127
## 144   144 0.8713169 0.2445082 0.6901234 0.02055057 0.02821877 0.01390427
## 145   145 0.8713925 0.2444003 0.6902265 0.02055085 0.02829057 0.01382527
## 146   146 0.8713433 0.2444627 0.6903059 0.02046646 0.02802768 0.01376804
## 147   147 0.8712520 0.2446196 0.6902213 0.02051285 0.02803622 0.01380768
## 148   148 0.8711324 0.2448208 0.6901144 0.02039050 0.02791873 0.01364614
## 149   149 0.8711975 0.2447368 0.6902198 0.02052032 0.02816039 0.01379495
## 150   150 0.8711879 0.2447435 0.6902846 0.02055890 0.02814802 0.01381923
## 151   151 0.8710743 0.2449411 0.6901716 0.02054134 0.02820099 0.01382262
## 152   152 0.8711917 0.2447529 0.6901754 0.02056990 0.02821888 0.01395043
## 153   153 0.8712597 0.2446552 0.6902587 0.02073971 0.02838072 0.01408055
## 154   154 0.8712807 0.2446489 0.6902880 0.02089951 0.02845465 0.01416981
## 155   155 0.8711507 0.2448433 0.6901109 0.02096027 0.02846472 0.01415984
## 156   156 0.8711705 0.2448291 0.6900400 0.02100605 0.02843497 0.01419098
## 157   157 0.8711036 0.2449448 0.6899937 0.02100978 0.02846470 0.01413015
## 158   158 0.8711764 0.2448273 0.6901460 0.02100624 0.02840651 0.01420823
## 159   159 0.8712169 0.2447865 0.6901696 0.02112536 0.02858491 0.01424374
## 160   160 0.8712945 0.2446766 0.6902077 0.02123372 0.02882578 0.01424542
## 161   161 0.8712094 0.2447998 0.6900654 0.02125411 0.02877984 0.01434739
## 162   162 0.8712629 0.2447297 0.6901386 0.02122538 0.02882386 0.01434178
## 163   163 0.8711825 0.2448439 0.6901538 0.02130267 0.02885490 0.01447740
## 164   164 0.8711829 0.2448342 0.6901762 0.02127197 0.02881170 0.01444986
## 165   165 0.8712576 0.2447245 0.6902907 0.02130658 0.02887700 0.01441796
## 166   166 0.8713113 0.2446355 0.6903306 0.02136225 0.02894989 0.01437565
## 167   167 0.8712216 0.2447760 0.6902309 0.02134884 0.02899630 0.01442554
## 168   168 0.8711338 0.2449167 0.6901520 0.02144511 0.02920371 0.01440488
## 169   169 0.8710861 0.2449973 0.6900785 0.02141829 0.02920969 0.01432771
## 170   170 0.8711177 0.2449500 0.6900418 0.02135538 0.02916873 0.01423370
## 171   171 0.8711211 0.2449541 0.6900218 0.02132538 0.02932554 0.01413091
## 172   172 0.8710780 0.2450230 0.6899941 0.02129128 0.02939646 0.01405897
## 173   173 0.8709928 0.2451605 0.6899367 0.02124310 0.02937252 0.01406856
## 174   174 0.8710301 0.2450961 0.6899956 0.02117073 0.02935160 0.01397037
## 175   175 0.8710716 0.2450239 0.6900741 0.02113136 0.02924623 0.01394193
## 176   176 0.8710630 0.2450432 0.6900440 0.02109010 0.02922176 0.01392667
## 177   177 0.8710327 0.2450848 0.6900352 0.02111557 0.02926948 0.01395574
## 178   178 0.8710528 0.2450638 0.6900243 0.02116066 0.02942807 0.01397555
## 179   179 0.8710596 0.2450525 0.6900270 0.02118822 0.02946012 0.01398807
## 180   180 0.8710546 0.2450577 0.6899895 0.02122769 0.02941479 0.01401486
## 181   181 0.8710621 0.2450610 0.6899539 0.02113441 0.02930182 0.01393759
## 182   182 0.8710273 0.2451195 0.6899044 0.02113192 0.02932570 0.01387505
## 183   183 0.8710678 0.2450663 0.6898991 0.02116056 0.02934898 0.01389266
## 184   184 0.8710276 0.2451175 0.6898601 0.02103778 0.02919606 0.01382968
## 185   185 0.8709995 0.2451600 0.6898525 0.02103351 0.02910620 0.01381127
## 186   186 0.8709884 0.2451744 0.6898756 0.02102556 0.02914508 0.01378116
## 187   187 0.8709916 0.2451660 0.6898806 0.02108584 0.02925044 0.01380207
## 188   188 0.8709931 0.2451716 0.6899296 0.02110156 0.02925602 0.01386441
## 189   189 0.8710116 0.2451511 0.6899808 0.02109192 0.02924510 0.01391869
## 190   190 0.8710250 0.2451236 0.6899950 0.02104918 0.02916591 0.01390334
## 191   191 0.8709487 0.2452523 0.6899603 0.02110662 0.02917305 0.01403939
## 192   192 0.8709544 0.2452412 0.6899522 0.02111542 0.02914872 0.01401714
## 193   193 0.8709757 0.2452073 0.6899711 0.02108689 0.02905452 0.01400590
## 194   194 0.8709095 0.2453121 0.6898963 0.02114777 0.02914047 0.01408294
## 195   195 0.8709330 0.2452625 0.6899183 0.02110893 0.02901316 0.01403361
## 196   196 0.8709160 0.2452925 0.6898779 0.02111789 0.02900687 0.01404222
## 197   197 0.8709516 0.2452424 0.6898881 0.02111298 0.02901593 0.01403159
## 198   198 0.8709218 0.2452872 0.6898667 0.02113950 0.02900133 0.01407111
## 199   199 0.8708782 0.2453475 0.6898381 0.02113509 0.02902172 0.01405424
## 200   200 0.8709196 0.2452859 0.6899149 0.02111217 0.02898765 0.01403278
## 201   201 0.8709730 0.2452037 0.6899384 0.02109812 0.02897965 0.01405802
## 202   202 0.8708995 0.2453109 0.6898949 0.02108571 0.02897273 0.01404901
## 203   203 0.8708754 0.2453524 0.6898682 0.02106880 0.02900676 0.01399838
## 204   204 0.8708616 0.2453799 0.6898729 0.02106382 0.02893318 0.01400573
## 205   205 0.8708489 0.2454034 0.6898480 0.02109830 0.02899896 0.01403535
## 206   206 0.8708463 0.2454086 0.6898147 0.02109298 0.02900501 0.01405162
## 207   207 0.8708843 0.2453488 0.6898349 0.02108793 0.02894228 0.01407714
## 208   208 0.8708767 0.2453638 0.6898140 0.02106154 0.02888536 0.01407658
## 209   209 0.8708811 0.2453599 0.6898047 0.02106125 0.02889934 0.01404389
## 210   210 0.8708825 0.2453493 0.6898012 0.02100837 0.02888897 0.01402877
## 211   211 0.8708911 0.2453340 0.6898066 0.02100218 0.02885073 0.01400499
## 212   212 0.8709106 0.2453006 0.6898119 0.02097700 0.02878977 0.01397276
## 213   213 0.8709249 0.2452777 0.6898214 0.02100077 0.02874207 0.01398078
## 214   214 0.8709303 0.2452699 0.6898206 0.02096002 0.02869285 0.01393806
## 215   215 0.8709433 0.2452467 0.6898224 0.02095978 0.02868184 0.01394823
## 216   216 0.8709181 0.2452826 0.6898032 0.02091984 0.02862750 0.01391265
## 217   217 0.8709383 0.2452499 0.6898019 0.02090191 0.02857891 0.01389143
## 218   218 0.8709594 0.2452213 0.6898122 0.02092089 0.02859895 0.01389152
## 219   219 0.8709504 0.2452379 0.6898225 0.02095061 0.02860147 0.01392855
## 220   220 0.8709595 0.2452215 0.6898295 0.02091165 0.02857674 0.01389363
## 221   221 0.8709620 0.2452193 0.6898415 0.02091774 0.02861461 0.01390884
## 222   222 0.8709319 0.2452631 0.6898199 0.02092204 0.02862018 0.01392612
## 223   223 0.8709544 0.2452302 0.6898368 0.02091255 0.02862773 0.01391731
## 224   224 0.8709725 0.2451994 0.6898613 0.02087188 0.02860715 0.01387450
## 225   225 0.8709869 0.2451785 0.6898824 0.02088212 0.02864416 0.01388276
## 226   226 0.8709852 0.2451818 0.6898890 0.02087977 0.02866299 0.01387216
## 227   227 0.8709797 0.2451895 0.6898882 0.02086446 0.02866466 0.01385896
## 228   228 0.8709643 0.2452123 0.6898701 0.02087165 0.02869113 0.01385553
## 229   229 0.8709644 0.2452103 0.6898638 0.02085810 0.02867718 0.01384820
## 230   230 0.8709578 0.2452244 0.6898649 0.02087034 0.02869888 0.01386252
## 231   231 0.8709710 0.2452039 0.6898742 0.02086966 0.02867474 0.01386523
## 232   232 0.8709715 0.2452026 0.6898689 0.02086836 0.02866868 0.01385658
## 233   233 0.8709690 0.2452062 0.6898742 0.02086723 0.02865404 0.01385623
## 234   234 0.8709601 0.2452207 0.6898732 0.02087161 0.02865150 0.01385151
## 235   235 0.8709550 0.2452287 0.6898716 0.02086803 0.02864109 0.01384866
## 236   236 0.8709448 0.2452437 0.6898626 0.02086830 0.02862462 0.01385545
## 237   237 0.8709466 0.2452403 0.6898668 0.02087086 0.02862439 0.01386168
## 238   238 0.8709469 0.2452393 0.6898647 0.02086630 0.02861955 0.01386117
## 239   239 0.8709465 0.2452396 0.6898656 0.02086696 0.02862015 0.01386082
## 240   240 0.8709482 0.2452370 0.6898676 0.02086412 0.02861708 0.01385710
##    nvmax
## 13    13
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
## -3.832583e+00 -1.336998e-03  3.502871e-01  1.316519e-02  1.026206e-01 
##           x10           x11           x16           x17           x21 
##  3.532314e-02  6.547159e+06  2.676086e-02  3.808197e-02  3.363089e-03 
##        stat14        stat98       stat110      sqrt.x18 
## -2.807359e-02  1.008003e-01 -9.721343e-02  7.689960e-01

Test

# Evaluate the CV-selected backward model on the hold-out test set,
# including the prediction-limit overlay.
if (algo.backward.caret) {
  test.model(
    model.backward, data.test,
    method = "leapBackward",
    subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##      Min.   1st Qu.    Median      Mean   3rd Qu.      Max. 
## -1.615548 -0.383649  0.009481 -0.010402  0.359098  1.565338 
## [1] "leapBackward  Test MSE: 0.707385129532587"

Backward Elimination with CV (w/ filtered train)

Train

# Cross-validated backward elimination refit on the *filtered* training set
# (`data.train2`). NOTE(review): this reassigns `model.backward` and `id`,
# replacing the full-train fit — confirm that is intentional.
if (algo.backward.caret) {
  set.seed(1)  # same seed as the full-train run, so CV folds are comparable
  fit <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "leapBackward",
    feature.names = feature.names
  )
  model.backward <- fit$model
  id <- fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 17 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD      MAESD
## 1       1 0.8409023 0.1577218 0.6879777 0.01268808 0.03072104 0.01129038
## 2       2 0.8033473 0.2319526 0.6608168 0.01175259 0.03593694 0.01051046
## 3       3 0.7837206 0.2693743 0.6423414 0.01806620 0.04565986 0.01381834
## 4       4 0.7609968 0.3105987 0.6196852 0.01993332 0.04660860 0.01470158
## 5       5 0.7484550 0.3326744 0.6098049 0.01972914 0.04767659 0.01512932
## 6       6 0.7445668 0.3396824 0.6073199 0.01970809 0.04865528 0.01514497
## 7       7 0.7443466 0.3399573 0.6072336 0.01890122 0.04736629 0.01403613
## 8       8 0.7424917 0.3430195 0.6063003 0.01804282 0.04515576 0.01382522
## 9       9 0.7401689 0.3468840 0.6048158 0.01731572 0.04337614 0.01408610
## 10     10 0.7365026 0.3532011 0.6026346 0.01616534 0.04154642 0.01295357
## 11     11 0.7376458 0.3512336 0.6032208 0.01601369 0.04073140 0.01253736
## 12     12 0.7384233 0.3498306 0.6035382 0.01612959 0.04097902 0.01250952
## 13     13 0.7379167 0.3507470 0.6030363 0.01664144 0.04048830 0.01313803
## 14     14 0.7366512 0.3529638 0.6023852 0.01627358 0.03987887 0.01271745
## 15     15 0.7363301 0.3534808 0.6018782 0.01553689 0.03820284 0.01210340
## 16     16 0.7366222 0.3530071 0.6022254 0.01480923 0.03657084 0.01189792
## 17     17 0.7359792 0.3540951 0.6018725 0.01419055 0.03605513 0.01155377
## 18     18 0.7361176 0.3538452 0.6018325 0.01415101 0.03464478 0.01156232
## 19     19 0.7370058 0.3523217 0.6022445 0.01442206 0.03474940 0.01227024
## 20     20 0.7373494 0.3517097 0.6024701 0.01386563 0.03336486 0.01206019
## 21     21 0.7372872 0.3518576 0.6025976 0.01408742 0.03349182 0.01227980
## 22     22 0.7368578 0.3526703 0.6017602 0.01376350 0.03334901 0.01211062
## 23     23 0.7367504 0.3529462 0.6015381 0.01458134 0.03387280 0.01297660
## 24     24 0.7371404 0.3523316 0.6018426 0.01444733 0.03421394 0.01288883
## 25     25 0.7372072 0.3523172 0.6018999 0.01440715 0.03406879 0.01271822
## 26     26 0.7373271 0.3521229 0.6018398 0.01429050 0.03351260 0.01278040
## 27     27 0.7372127 0.3523470 0.6017150 0.01458782 0.03434620 0.01272492
## 28     28 0.7372755 0.3522307 0.6016442 0.01463069 0.03437772 0.01264547
## 29     29 0.7375749 0.3517467 0.6021127 0.01488587 0.03398231 0.01289704
## 30     30 0.7375227 0.3518716 0.6023500 0.01459128 0.03365574 0.01286878
## 31     31 0.7377705 0.3514657 0.6024597 0.01422051 0.03324631 0.01279901
## 32     32 0.7383128 0.3506111 0.6028133 0.01454079 0.03376370 0.01322200
## 33     33 0.7382718 0.3507042 0.6027945 0.01471495 0.03408913 0.01361802
## 34     34 0.7377549 0.3515500 0.6023653 0.01482602 0.03350321 0.01345569
## 35     35 0.7379240 0.3512618 0.6025720 0.01491329 0.03457184 0.01372266
## 36     36 0.7381104 0.3509596 0.6024181 0.01508856 0.03480513 0.01389831
## 37     37 0.7381141 0.3509782 0.6025277 0.01487787 0.03412089 0.01352841
## 38     38 0.7378770 0.3513823 0.6025274 0.01454885 0.03356583 0.01325738
## 39     39 0.7377048 0.3516502 0.6023915 0.01459231 0.03387719 0.01291884
## 40     40 0.7375929 0.3518995 0.6023054 0.01471345 0.03372080 0.01275177
## 41     41 0.7378914 0.3514446 0.6024821 0.01485094 0.03338336 0.01295691
## 42     42 0.7378371 0.3515504 0.6023102 0.01506818 0.03345314 0.01271449
## 43     43 0.7379491 0.3513231 0.6021885 0.01479952 0.03248226 0.01213840
## 44     44 0.7374019 0.3522785 0.6020382 0.01488189 0.03282134 0.01251166
## 45     45 0.7376586 0.3518105 0.6022347 0.01460333 0.03210624 0.01254027
## 46     46 0.7375983 0.3519461 0.6022689 0.01456510 0.03182410 0.01265723
## 47     47 0.7373981 0.3523003 0.6020758 0.01489169 0.03232813 0.01264166
## 48     48 0.7376898 0.3518168 0.6024962 0.01484189 0.03169575 0.01263589
## 49     49 0.7377216 0.3517911 0.6024667 0.01512100 0.03191734 0.01289085
## 50     50 0.7376040 0.3520472 0.6025506 0.01523184 0.03201908 0.01285872
## 51     51 0.7372365 0.3526887 0.6023352 0.01540784 0.03284209 0.01294571
## 52     52 0.7371219 0.3528779 0.6024201 0.01576571 0.03312795 0.01316877
## 53     53 0.7370394 0.3530361 0.6024870 0.01562419 0.03245547 0.01335903
## 54     54 0.7372152 0.3527328 0.6025736 0.01547709 0.03205097 0.01356658
## 55     55 0.7372708 0.3526065 0.6025575 0.01568696 0.03241263 0.01384578
## 56     56 0.7372332 0.3526753 0.6023962 0.01578725 0.03251495 0.01381077
## 57     57 0.7376137 0.3520636 0.6025758 0.01583334 0.03237593 0.01362276
## 58     58 0.7374795 0.3522699 0.6023492 0.01605502 0.03298030 0.01365373
## 59     59 0.7377744 0.3518425 0.6024588 0.01643782 0.03310926 0.01378051
## 60     60 0.7380791 0.3513853 0.6026282 0.01645696 0.03329783 0.01409137
## 61     61 0.7381397 0.3513384 0.6027551 0.01664938 0.03344053 0.01419329
## 62     62 0.7382508 0.3511727 0.6029704 0.01658724 0.03363007 0.01432887
## 63     63 0.7382576 0.3512027 0.6028737 0.01629372 0.03356103 0.01417732
## 64     64 0.7383746 0.3509849 0.6030757 0.01605154 0.03315082 0.01376910
## 65     65 0.7387327 0.3504177 0.6034507 0.01618137 0.03326129 0.01396999
## 66     66 0.7390461 0.3499040 0.6037078 0.01638624 0.03342893 0.01410269
## 67     67 0.7385045 0.3508627 0.6031545 0.01672898 0.03421736 0.01432526
## 68     68 0.7385858 0.3507369 0.6030607 0.01658705 0.03388235 0.01437774
## 69     69 0.7388391 0.3503356 0.6031353 0.01644201 0.03368935 0.01412372
## 70     70 0.7387980 0.3503680 0.6032025 0.01629353 0.03391067 0.01413948
## 71     71 0.7389873 0.3500915 0.6032766 0.01635571 0.03417024 0.01408100
## 72     72 0.7391037 0.3499219 0.6035064 0.01609167 0.03384324 0.01384300
## 73     73 0.7390076 0.3501041 0.6033828 0.01635003 0.03443825 0.01409387
## 74     74 0.7390843 0.3499814 0.6034307 0.01595343 0.03295164 0.01364662
## 75     75 0.7388027 0.3504508 0.6032022 0.01579688 0.03262158 0.01348111
## 76     76 0.7390225 0.3501074 0.6034206 0.01590821 0.03293398 0.01349360
## 77     77 0.7389030 0.3502661 0.6034050 0.01571751 0.03277041 0.01349978
## 78     78 0.7391950 0.3497614 0.6036815 0.01536125 0.03222841 0.01334885
## 79     79 0.7392425 0.3497024 0.6037829 0.01522320 0.03201525 0.01352886
## 80     80 0.7394061 0.3494400 0.6038718 0.01550948 0.03273047 0.01405218
## 81     81 0.7395108 0.3492558 0.6039439 0.01524335 0.03254848 0.01384311
## 82     82 0.7395085 0.3492685 0.6039023 0.01514255 0.03225488 0.01365647
## 83     83 0.7395349 0.3492309 0.6037019 0.01504171 0.03239066 0.01359883
## 84     84 0.7397027 0.3489361 0.6040221 0.01512988 0.03231981 0.01373316
## 85     85 0.7400295 0.3484032 0.6042821 0.01500098 0.03236451 0.01358936
## 86     86 0.7398662 0.3486751 0.6041728 0.01502200 0.03241720 0.01354784
## 87     87 0.7397505 0.3488814 0.6041123 0.01485928 0.03183120 0.01317509
## 88     88 0.7399670 0.3485393 0.6042951 0.01509531 0.03222962 0.01318533
## 89     89 0.7400661 0.3483583 0.6043356 0.01478480 0.03182382 0.01315734
## 90     90 0.7403943 0.3478274 0.6045738 0.01455955 0.03141622 0.01284217
## 91     91 0.7401238 0.3482523 0.6043072 0.01459990 0.03140993 0.01286387
## 92     92 0.7400457 0.3484523 0.6043260 0.01480801 0.03152765 0.01311500
## 93     93 0.7404453 0.3478160 0.6046014 0.01478363 0.03132169 0.01312466
## 94     94 0.7403691 0.3479306 0.6048070 0.01484601 0.03168078 0.01324328
## 95     95 0.7406790 0.3474266 0.6050102 0.01501495 0.03153065 0.01338118
## 96     96 0.7406927 0.3474062 0.6050747 0.01493761 0.03158714 0.01332951
## 97     97 0.7409219 0.3470317 0.6052091 0.01468682 0.03153482 0.01316262
## 98     98 0.7408168 0.3472237 0.6050968 0.01484045 0.03179378 0.01317662
## 99     99 0.7405184 0.3477307 0.6048749 0.01456939 0.03177738 0.01275284
## 100   100 0.7404346 0.3478943 0.6048496 0.01450319 0.03184880 0.01273761
## 101   101 0.7402881 0.3481396 0.6048540 0.01453947 0.03191112 0.01268602
## 102   102 0.7403832 0.3479879 0.6048800 0.01453767 0.03202345 0.01257459
## 103   103 0.7403688 0.3480146 0.6049395 0.01464919 0.03183649 0.01270745
## 104   104 0.7403124 0.3480989 0.6049820 0.01472580 0.03204819 0.01281036
## 105   105 0.7402172 0.3482438 0.6049548 0.01458002 0.03190200 0.01267844
## 106   106 0.7402993 0.3480933 0.6051075 0.01501857 0.03242247 0.01298598
## 107   107 0.7401874 0.3482979 0.6050092 0.01496628 0.03244739 0.01280919
## 108   108 0.7402630 0.3481861 0.6049969 0.01465175 0.03227564 0.01244856
## 109   109 0.7404468 0.3478790 0.6051420 0.01440410 0.03217258 0.01219586
## 110   110 0.7406564 0.3475380 0.6053295 0.01472767 0.03273813 0.01256121
## 111   111 0.7407610 0.3473714 0.6054144 0.01467090 0.03248219 0.01259669
## 112   112 0.7407580 0.3473811 0.6053564 0.01443215 0.03206156 0.01238517
## 113   113 0.7407080 0.3474429 0.6053652 0.01420734 0.03208182 0.01224972
## 114   114 0.7406229 0.3475908 0.6053490 0.01437824 0.03210315 0.01235196
## 115   115 0.7405596 0.3477036 0.6052809 0.01439425 0.03219187 0.01231626
## 116   116 0.7406679 0.3475347 0.6051848 0.01418109 0.03191825 0.01223038
## 117   117 0.7404972 0.3478340 0.6050197 0.01418217 0.03168933 0.01235486
## 118   118 0.7403845 0.3480213 0.6049690 0.01414636 0.03162111 0.01232143
## 119   119 0.7402303 0.3482506 0.6047466 0.01430091 0.03165942 0.01260343
## 120   120 0.7401861 0.3483384 0.6047335 0.01428693 0.03152260 0.01262279
## 121   121 0.7402103 0.3483155 0.6046222 0.01448635 0.03188687 0.01276138
## 122   122 0.7403483 0.3480836 0.6048353 0.01434917 0.03179222 0.01282640
## 123   123 0.7402045 0.3483202 0.6047277 0.01446786 0.03190504 0.01311068
## 124   124 0.7403518 0.3480793 0.6049054 0.01451475 0.03209309 0.01322338
## 125   125 0.7405213 0.3477938 0.6049572 0.01450236 0.03182728 0.01310405
## 126   126 0.7406014 0.3476707 0.6050529 0.01431562 0.03145696 0.01292992
## 127   127 0.7405324 0.3477840 0.6049756 0.01432301 0.03117267 0.01290428
## 128   128 0.7404835 0.3478769 0.6049493 0.01443204 0.03120109 0.01304867
## 129   129 0.7407094 0.3475097 0.6051054 0.01457132 0.03169228 0.01323954
## 130   130 0.7407116 0.3475143 0.6049965 0.01469228 0.03185349 0.01327648
## 131   131 0.7408535 0.3473061 0.6052189 0.01469072 0.03166311 0.01319833
## 132   132 0.7410539 0.3469893 0.6053379 0.01473804 0.03155754 0.01322387
## 133   133 0.7411810 0.3467911 0.6055590 0.01463346 0.03130477 0.01314360
## 134   134 0.7414184 0.3464083 0.6057505 0.01459540 0.03142496 0.01299147
## 135   135 0.7415048 0.3462832 0.6057333 0.01456210 0.03138466 0.01289830
## 136   136 0.7415867 0.3461395 0.6057500 0.01447461 0.03136812 0.01282086
## 137   137 0.7415149 0.3462774 0.6057140 0.01456202 0.03141484 0.01293619
## 138   138 0.7416134 0.3461474 0.6056903 0.01477512 0.03171859 0.01317331
## 139   139 0.7417650 0.3458963 0.6057407 0.01491138 0.03169137 0.01321146
## 140   140 0.7417896 0.3458590 0.6057133 0.01502446 0.03167373 0.01331165
## 141   141 0.7416891 0.3460490 0.6055564 0.01508210 0.03148584 0.01335452
## 142   142 0.7416292 0.3461382 0.6055340 0.01503479 0.03119411 0.01327337
## 143   143 0.7417116 0.3460255 0.6055365 0.01520288 0.03147391 0.01337913
## 144   144 0.7417769 0.3459220 0.6056472 0.01529707 0.03157738 0.01339249
## 145   145 0.7419946 0.3455530 0.6058434 0.01528981 0.03146605 0.01333651
## 146   146 0.7421110 0.3453583 0.6058797 0.01530408 0.03166371 0.01347253
## 147   147 0.7423485 0.3449507 0.6060911 0.01529595 0.03161342 0.01351053
## 148   148 0.7423636 0.3449139 0.6059721 0.01521092 0.03146740 0.01341186
## 149   149 0.7425405 0.3446211 0.6060859 0.01509343 0.03129561 0.01342711
## 150   150 0.7424150 0.3448364 0.6060554 0.01501784 0.03127619 0.01334663
## 151   151 0.7424141 0.3448346 0.6060622 0.01506357 0.03133307 0.01333572
## 152   152 0.7424888 0.3447051 0.6061353 0.01506561 0.03133366 0.01336135
## 153   153 0.7425556 0.3445889 0.6061403 0.01512377 0.03125195 0.01341137
## 154   154 0.7426530 0.3444160 0.6061866 0.01506126 0.03125537 0.01341753
## 155   155 0.7427099 0.3443349 0.6061559 0.01509819 0.03111321 0.01350801
## 156   156 0.7427972 0.3442023 0.6061571 0.01510239 0.03120699 0.01352352
## 157   157 0.7429206 0.3439946 0.6062349 0.01505108 0.03118317 0.01336463
## 158   158 0.7430134 0.3438544 0.6062534 0.01486129 0.03098752 0.01316730
## 159   159 0.7430210 0.3438554 0.6062399 0.01486083 0.03113717 0.01319621
## 160   160 0.7429634 0.3439434 0.6061887 0.01487255 0.03128419 0.01327055
## 161   161 0.7429806 0.3439221 0.6062216 0.01490870 0.03141110 0.01329477
## 162   162 0.7429664 0.3439669 0.6062678 0.01496166 0.03133529 0.01339820
## 163   163 0.7429365 0.3440385 0.6061456 0.01496788 0.03136039 0.01339229
## 164   164 0.7429257 0.3440758 0.6060603 0.01504588 0.03141407 0.01344777
## 165   165 0.7429236 0.3440784 0.6060079 0.01499143 0.03135895 0.01342858
## 166   166 0.7430017 0.3439519 0.6060545 0.01506918 0.03141121 0.01341857
## 167   167 0.7430000 0.3439638 0.6060575 0.01519273 0.03163045 0.01359427
## 168   168 0.7429947 0.3439801 0.6060982 0.01512208 0.03156283 0.01355600
## 169   169 0.7430206 0.3439554 0.6060677 0.01516084 0.03151923 0.01355900
## 170   170 0.7431220 0.3437971 0.6061193 0.01521663 0.03164527 0.01355039
## 171   171 0.7431651 0.3437330 0.6061491 0.01531951 0.03188253 0.01358671
## 172   172 0.7431836 0.3437089 0.6061519 0.01526135 0.03170844 0.01356894
## 173   173 0.7431549 0.3437595 0.6061103 0.01527614 0.03167681 0.01359743
## 174   174 0.7432130 0.3436674 0.6061076 0.01523227 0.03168428 0.01353175
## 175   175 0.7432499 0.3436157 0.6061330 0.01519459 0.03164073 0.01344587
## 176   176 0.7432048 0.3436708 0.6060405 0.01503029 0.03137983 0.01336362
## 177   177 0.7432390 0.3436147 0.6060741 0.01505200 0.03140119 0.01339209
## 178   178 0.7431913 0.3436909 0.6060696 0.01507066 0.03146759 0.01334215
## 179   179 0.7432746 0.3435630 0.6061199 0.01507136 0.03141377 0.01333228
## 180   180 0.7432253 0.3436496 0.6060880 0.01512654 0.03147772 0.01335056
## 181   181 0.7432560 0.3435998 0.6061304 0.01527626 0.03167757 0.01348495
## 182   182 0.7433543 0.3434354 0.6062023 0.01527211 0.03169557 0.01349290
## 183   183 0.7434614 0.3432801 0.6063157 0.01539036 0.03167976 0.01356371
## 184   184 0.7435854 0.3430769 0.6064243 0.01544842 0.03178597 0.01361645
## 185   185 0.7436355 0.3430069 0.6064958 0.01550938 0.03179894 0.01362611
## 186   186 0.7436124 0.3430396 0.6064603 0.01555809 0.03181629 0.01363334
## 187   187 0.7435689 0.3431125 0.6064102 0.01562710 0.03189475 0.01368449
## 188   188 0.7436633 0.3429568 0.6064664 0.01554611 0.03194759 0.01358226
## 189   189 0.7436492 0.3429911 0.6064843 0.01554664 0.03196917 0.01353404
## 190   190 0.7436669 0.3429598 0.6065072 0.01555446 0.03204030 0.01350482
## 191   191 0.7436279 0.3430136 0.6064554 0.01538995 0.03182775 0.01336307
## 192   192 0.7436267 0.3430279 0.6064400 0.01537315 0.03175541 0.01335028
## 193   193 0.7436532 0.3429783 0.6064858 0.01535157 0.03166984 0.01331418
## 194   194 0.7436417 0.3429944 0.6064703 0.01532142 0.03169376 0.01329805
## 195   195 0.7436183 0.3430320 0.6064395 0.01530193 0.03163488 0.01331801
## 196   196 0.7435462 0.3431481 0.6063682 0.01523332 0.03157015 0.01331371
## 197   197 0.7435638 0.3431212 0.6064025 0.01520408 0.03152700 0.01325910
## 198   198 0.7435758 0.3431076 0.6064145 0.01525276 0.03160789 0.01330619
## 199   199 0.7435970 0.3430777 0.6064297 0.01524740 0.03151943 0.01327761
## 200   200 0.7435723 0.3431259 0.6063866 0.01535008 0.03173058 0.01338095
## 201   201 0.7436250 0.3430418 0.6064051 0.01535411 0.03175655 0.01338200
## 202   202 0.7436100 0.3430681 0.6063696 0.01537196 0.03181173 0.01341399
## 203   203 0.7436129 0.3430644 0.6063610 0.01532804 0.03179156 0.01336434
## 204   204 0.7436245 0.3430470 0.6064024 0.01538031 0.03181569 0.01343854
## 205   205 0.7436023 0.3430820 0.6063737 0.01535258 0.03186156 0.01338926
## 206   206 0.7436521 0.3430004 0.6064292 0.01535949 0.03193848 0.01339256
## 207   207 0.7436801 0.3429593 0.6064461 0.01537046 0.03196902 0.01340616
## 208   208 0.7436931 0.3429467 0.6064430 0.01539443 0.03199275 0.01339819
## 209   209 0.7437028 0.3429359 0.6064464 0.01540078 0.03193750 0.01339788
## 210   210 0.7437088 0.3429251 0.6064454 0.01535859 0.03192137 0.01339388
## 211   211 0.7437118 0.3429198 0.6064465 0.01538974 0.03192214 0.01340304
## 212   212 0.7437175 0.3429070 0.6064385 0.01537155 0.03190504 0.01337462
## 213   213 0.7437175 0.3429068 0.6064143 0.01535436 0.03184461 0.01335906
## 214   214 0.7436867 0.3429525 0.6063931 0.01534535 0.03181961 0.01333518
## 215   215 0.7436678 0.3429802 0.6063712 0.01532964 0.03176031 0.01332230
## 216   216 0.7436439 0.3430165 0.6063530 0.01528882 0.03171237 0.01329207
## 217   217 0.7436677 0.3429788 0.6063870 0.01530666 0.03176973 0.01329694
## 218   218 0.7437034 0.3429187 0.6064184 0.01532022 0.03182433 0.01331454
## 219   219 0.7437012 0.3429249 0.6064269 0.01531953 0.03183132 0.01332384
## 220   220 0.7436988 0.3429267 0.6064309 0.01531341 0.03185490 0.01330329
## 221   221 0.7437093 0.3429110 0.6064433 0.01535698 0.03193747 0.01335169
## 222   222 0.7436958 0.3429315 0.6064429 0.01536589 0.03196301 0.01337918
## 223   223 0.7436896 0.3429430 0.6064373 0.01537708 0.03199155 0.01339421
## 224   224 0.7436885 0.3429466 0.6064327 0.01536950 0.03198249 0.01338616
## 225   225 0.7436762 0.3429667 0.6064264 0.01534809 0.03197904 0.01335714
## 226   226 0.7436865 0.3429481 0.6064388 0.01536621 0.03202983 0.01335675
## 227   227 0.7437125 0.3429066 0.6064591 0.01536721 0.03201308 0.01336770
## 228   228 0.7437030 0.3429207 0.6064592 0.01535874 0.03200725 0.01336021
## 229   229 0.7437106 0.3429066 0.6064647 0.01535667 0.03200362 0.01335641
## 230   230 0.7437098 0.3429080 0.6064691 0.01535732 0.03201594 0.01334940
## 231   231 0.7437054 0.3429144 0.6064655 0.01536698 0.03202970 0.01336248
## 232   232 0.7437061 0.3429145 0.6064598 0.01537330 0.03203832 0.01335318
## 233   233 0.7437095 0.3429094 0.6064650 0.01538046 0.03202637 0.01336097
## 234   234 0.7437068 0.3429137 0.6064687 0.01537520 0.03201916 0.01336015
## 235   235 0.7437111 0.3429055 0.6064710 0.01538018 0.03202291 0.01336781
## 236   236 0.7437075 0.3429119 0.6064647 0.01537818 0.03201369 0.01336517
## 237   237 0.7437101 0.3429086 0.6064644 0.01538230 0.03202068 0.01336713
## 238   238 0.7437119 0.3429055 0.6064625 0.01538549 0.03202128 0.01336905
## 239   239 0.7437121 0.3429051 0.6064628 0.01538583 0.03202209 0.01337002
## 240   240 0.7437119 0.3429053 0.6064635 0.01538568 0.03202256 0.01336963
##    nvmax
## 17    17
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
## -4.116119e+00 -1.619117e-03  3.779949e-01  1.292227e-02  1.051306e-01 
##           x10           x11           x13           x16           x17 
##  4.277716e-02  6.539776e+06  5.066423e-03  2.612787e-02  4.365707e-02 
##           x21         stat4        stat14        stat41        stat98 
##  3.788257e-03 -1.817541e-02 -3.261225e-02 -1.897833e-02  1.064435e-01 
##       stat100       stat110      sqrt.x18 
##  1.807879e-02 -9.987636e-02  7.991268e-01

Test

# Evaluate the CV-tuned backward-selection (leapBackward) model on the
# held-out test set. test.model prints a summary of the predicted values
# and the test MSE; draw.limits = TRUE overlays prediction limits.
# NOTE(review): formula/feature.names/label.names/id are assumed to be the
# objects built earlier in this report — confirm against the training chunk.
if (isTRUE(algo.backward.caret)) {
  test.model(model.backward, data.test,
             method = "leapBackward", subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.74771 -0.47381 -0.04996 -0.07002  0.32217  1.61661 
## [1] "leapBackward  Test MSE: 0.713899047411159"

Stepwise Selection (w/ full train)

Train

# Bidirectional (both-direction) stepwise selection on the full training set.
# Starts from the intercept-only model (model.null) and may add/drop terms up
# to model.full; trace = 0 suppresses per-step output.
if (isTRUE(algo.stepwise)) {
  t1 <- Sys.time()

  model.stepwise <- step(model.null,
                         scope = list(upper = model.full),
                         data = data.train,
                         direction = "both",
                         trace = 0)
  print(summary(model.stepwise))
  # saveRDS(model.stepwise, file = "model_stepwise.rds")

  t2 <- Sys.time()
  # format() keeps the difftime's unit in the message; a bare t2 - t1 would
  # print only the number, whose unit (secs vs mins) varies with run time.
  print(paste0("Time taken for Stepwise Selection: ", format(t2 - t1)))

  # Residual/diagnostic plots for the selected model on the training data.
  plot.diagnostics(model.stepwise, data.train)
}

Test

# Score the stepwise-selected model (full training data) on the test set;
# test.model prints the prediction summary and test MSE.
if (isTRUE(algo.stepwise)) {
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

# Bidirectional stepwise selection on the *filtered* training set
# (data.train2), mirroring the full-data chunk above but using the
# null/full models built from the filtered predictors.
if (isTRUE(algo.stepwise)) {
  t1 <- Sys.time()

  model.stepwise2 <- step(model.null2,
                          scope = list(upper = model.full2),
                          data = data.train2,
                          direction = "both",
                          trace = 0)
  print(summary(model.stepwise2))
  # saveRDS(model.stepwise2, file = "model_stepwise2.rds")

  t2 <- Sys.time()
  # format() keeps the difftime's unit visible in the printed message.
  print(paste0("Time taken for Stepwise Selection: ", format(t2 - t1)))

  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Score the stepwise model fit on the filtered training data against the
# (unfiltered) test set, labelling the output "(2)" to distinguish it from
# the full-train variant.
if (isTRUE(algo.stepwise)) {
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

# Cross-validated stepwise selection via caret's "leapSeq" method, using the
# project helper train.caret.glmselect. The seed is fixed so the CV folds
# (and hence the selected nvmax) are reproducible.
# NOTE(review): this overwrites model.stepwise from the non-CV chunk above —
# presumably intentional, but confirm only one of the two paths runs.
if (isTRUE(algo.stepwise.caret)) {
  set.seed(1)
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "leapSeq",
                                    feature.names = feature.names)
  model.stepwise <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 13 on full training set
##     nvmax      RMSE  Rsquared       MAE     RMSESD RsquaredSD      MAESD
## 1       1 0.9365643 0.1236737 0.7536160 0.01668298 0.01993896 0.01279423
## 2       2 0.9079494 0.1770387 0.7298664 0.01750665 0.02892992 0.01357292
## 3       3 0.8944193 0.2008249 0.7155952 0.01901631 0.02919065 0.01320105
## 4       4 0.8742691 0.2363913 0.6937420 0.02185141 0.03097169 0.01484363
## 5       5 0.8638186 0.2544505 0.6856338 0.02338283 0.03597771 0.01555964
## 6       6 0.8614669 0.2584423 0.6838031 0.02330853 0.03655409 0.01630994
## 7       7 0.8616264 0.2581201 0.6838827 0.02252992 0.03576400 0.01543925
## 8       8 0.8606372 0.2598909 0.6832436 0.02346111 0.03617819 0.01574605
## 9       9 0.8589201 0.2629713 0.6817973 0.02303138 0.03595457 0.01457108
## 10     10 0.8560569 0.2677599 0.6802123 0.02220196 0.03458445 0.01366388
## 11     11 0.8566254 0.2668271 0.6804815 0.02260781 0.03462453 0.01470736
## 12     12 0.8560935 0.2677508 0.6801040 0.02254458 0.03351414 0.01464169
## 13     13 0.8555747 0.2686228 0.6800973 0.02206305 0.03335563 0.01510989
## 14     14 0.8684996 0.2458145 0.6912694 0.04555876 0.07697800 0.03766811
## 15     15 0.8564294 0.2671722 0.6809500 0.02114252 0.03193376 0.01400192
## 16     16 0.8572779 0.2657165 0.6813558 0.02112224 0.03148819 0.01412545
## 17     17 0.8582069 0.2641794 0.6810568 0.02120382 0.03072687 0.01417740
## 18     18 0.8683081 0.2457771 0.6890683 0.03781980 0.06997428 0.02875000
## 19     19 0.8589868 0.2629528 0.6816334 0.02177558 0.03061139 0.01507559
## 20     20 0.8594587 0.2621429 0.6820007 0.02169319 0.03064491 0.01488072
## 21     21 0.8678498 0.2483197 0.6886636 0.03595766 0.04640848 0.02849371
## 22     22 0.8708501 0.2419670 0.6916296 0.04217455 0.07105227 0.03449019
## 23     23 0.8810467 0.2234093 0.6988261 0.04534472 0.07411202 0.03735346
## 24     24 0.8600881 0.2611761 0.6823737 0.02007091 0.02934298 0.01373595
## 25     25 0.8597106 0.2618264 0.6820676 0.01977088 0.02939082 0.01361317
## 26     26 0.8597122 0.2618222 0.6821121 0.01991373 0.02947959 0.01351560
## 27     27 0.8602658 0.2609066 0.6828477 0.01955309 0.02872751 0.01344418
## 28     28 0.8606623 0.2602611 0.6834735 0.01934106 0.02845230 0.01325420
## 29     29 0.8700300 0.2427569 0.6915041 0.02582542 0.05922596 0.02239276
## 30     30 0.8607544 0.2601400 0.6836420 0.01890604 0.02802257 0.01297868
## 31     31 0.8822959 0.2215009 0.7010596 0.04860779 0.08772134 0.03984491
## 32     32 0.9028842 0.1848311 0.7178647 0.06526738 0.09863975 0.05341226
## 33     33 0.8699817 0.2427809 0.6901422 0.03509189 0.06834113 0.02579762
## 34     34 0.8619047 0.2582813 0.6840148 0.01916073 0.02949779 0.01381832
## 35     35 0.8733213 0.2379619 0.6927647 0.04238067 0.06708812 0.03363947
## 36     36 0.8627138 0.2569905 0.6846949 0.01920017 0.02907708 0.01353399
## 37     37 0.8627289 0.2569871 0.6847022 0.01946511 0.02971603 0.01372192
## 38     38 0.8701930 0.2425128 0.6899009 0.03433132 0.06696438 0.02494788
## 39     39 0.8624480 0.2574873 0.6844157 0.01956892 0.02998905 0.01393425
## 40     40 0.8797562 0.2251616 0.6985789 0.04317398 0.08106969 0.03073313
## 41     41 0.8631472 0.2563417 0.6845997 0.01931524 0.02937825 0.01399582
## 42     42 0.8821562 0.2214747 0.6996974 0.04867200 0.08943125 0.03900645
## 43     43 0.8741159 0.2368415 0.6925741 0.04320596 0.06826614 0.03449859
## 44     44 0.8745547 0.2362301 0.6941923 0.04142863 0.07041583 0.03421045
## 45     45 0.8819326 0.2214796 0.6994800 0.02552627 0.07106821 0.02272890
## 46     46 0.9026960 0.1859372 0.7164704 0.04777782 0.08844200 0.04028648
## 47     47 0.8701520 0.2430572 0.6907379 0.03550407 0.06053715 0.02763191
## 48     48 0.8641921 0.2548261 0.6851247 0.01966986 0.02915284 0.01396755
## 49     49 0.8645180 0.2543106 0.6853467 0.01963492 0.02934337 0.01397718
## 50     50 0.8819576 0.2235049 0.6989654 0.04258227 0.06935197 0.03508431
## 51     51 0.8731167 0.2381544 0.6922569 0.02466306 0.05692725 0.02151898
## 52     52 0.8762601 0.2336580 0.6941138 0.04306383 0.06734889 0.03443684
## 53     53 0.8798719 0.2254673 0.6984527 0.04416932 0.08089270 0.03501418
## 54     54 0.8652586 0.2532166 0.6852904 0.01969358 0.02942862 0.01408760
## 55     55 0.8652025 0.2533017 0.6852252 0.01977879 0.02915355 0.01420397
## 56     56 0.8652781 0.2532059 0.6852678 0.01970730 0.02956590 0.01403179
## 57     57 0.8745841 0.2362678 0.6937008 0.03451621 0.05839301 0.02465673
## 58     58 0.8863463 0.2169775 0.7012670 0.04821335 0.06047005 0.03984165
## 59     59 0.8657470 0.2524974 0.6854787 0.01972586 0.02973751 0.01403003
## 60     60 0.8654432 0.2530639 0.6854319 0.02007355 0.03051382 0.01396901
## 61     61 0.8932155 0.2021862 0.7089150 0.04659690 0.09315985 0.04020249
## 62     62 0.8652109 0.2534149 0.6848095 0.01984374 0.03025510 0.01383974
## 63     63 0.8715784 0.2409364 0.6909191 0.03629375 0.06193559 0.02747638
## 64     64 0.8760296 0.2336382 0.6933162 0.02566840 0.05958077 0.01904316
## 65     65 0.8880385 0.2125886 0.7021296 0.04299140 0.07926482 0.03501457
## 66     66 0.8725339 0.2395036 0.6916220 0.03634364 0.06172763 0.02743279
## 67     67 0.8834021 0.2214880 0.7001810 0.04202842 0.06328680 0.03233084
## 68     68 0.8668116 0.2509942 0.6860174 0.02003041 0.02942325 0.01394937
## 69     69 0.8751975 0.2355059 0.6931327 0.03478228 0.06442927 0.02695087
## 70     70 0.8666732 0.2512500 0.6862264 0.02021154 0.02917964 0.01377957
## 71     71 0.9060678 0.1811125 0.7179772 0.05270345 0.08156244 0.04574893
## 72     72 0.8669202 0.2508642 0.6861849 0.02022137 0.02891208 0.01394547
## 73     73 0.8675436 0.2498871 0.6868014 0.02074609 0.02877446 0.01429179
## 74     74 0.8733473 0.2383365 0.6926002 0.03615040 0.06062940 0.02743200
## 75     75 0.8960918 0.1994615 0.7090151 0.05361269 0.07794844 0.04366084
## 76     76 0.9024684 0.1852846 0.7163689 0.05230750 0.09356539 0.04153517
## 77     77 0.8669084 0.2509829 0.6864569 0.02065770 0.02835492 0.01421158
## 78     78 0.8776902 0.2313026 0.6946336 0.02513841 0.05748066 0.01774298
## 79     79 0.8791868 0.2298142 0.6954161 0.04316382 0.05748931 0.03351265
## 80     80 0.8757350 0.2358449 0.6933919 0.03425949 0.04489868 0.02733257
## 81     81 0.8765512 0.2330180 0.6941004 0.02445984 0.05681275 0.02112592
## 82     82 0.8765853 0.2335071 0.6954750 0.03456584 0.05725502 0.02575949
## 83     83 0.8680133 0.2493031 0.6874608 0.02049275 0.02866357 0.01389431
## 84     84 0.8867676 0.2155631 0.7021050 0.03523085 0.06312158 0.02776164
## 85     85 0.8874227 0.2143836 0.7052181 0.04758269 0.08050568 0.03787617
## 86     86 0.8684358 0.2486752 0.6878719 0.02031437 0.02834568 0.01402231
## 87     87 0.8862128 0.2155965 0.7019380 0.03530312 0.07689518 0.02948677
## 88     88 0.8876323 0.2148273 0.7040960 0.04803901 0.07366866 0.04013627
## 89     89 0.8890348 0.2128303 0.7033102 0.04926284 0.06265417 0.03916855
## 90     90 0.8791539 0.2297972 0.6971900 0.04174764 0.06906331 0.03471808
## 91     91 0.8881358 0.2121384 0.7027069 0.02402746 0.06971902 0.02008159
## 92     92 0.8961761 0.1980263 0.7110722 0.05397855 0.09997081 0.04344317
## 93     93 0.8767937 0.2328224 0.6944043 0.03546367 0.06640248 0.02600166
## 94     94 0.8896919 0.2110558 0.7050542 0.04943229 0.07173124 0.03794960
## 95     95 0.8893312 0.2115198 0.7048082 0.04966344 0.07924278 0.03827840
## 96     96 0.8693403 0.2473081 0.6885215 0.02090901 0.02951080 0.01422812
## 97     97 0.8881952 0.2132589 0.7031636 0.03500766 0.06315916 0.02769629
## 98     98 0.8694349 0.2471385 0.6885569 0.02082652 0.02915339 0.01456965
## 99     99 0.8779681 0.2313865 0.6965128 0.03466475 0.05784302 0.02560284
## 100   100 0.8873298 0.2146413 0.7030686 0.05154811 0.08258908 0.04013377
## 101   101 0.8790121 0.2299558 0.6967405 0.03716780 0.06748775 0.02895051
## 102   102 0.8781525 0.2311377 0.6969265 0.03476901 0.05797698 0.02563416
## 103   103 0.8905289 0.2091553 0.7044055 0.04258219 0.07033540 0.03505715
## 104   104 0.8695932 0.2469401 0.6890519 0.02076765 0.02928223 0.01460585
## 105   105 0.8880504 0.2135613 0.7038346 0.05074847 0.07415235 0.03949009
## 106   106 0.8800272 0.2278115 0.6970377 0.02498742 0.05665633 0.01767124
## 107   107 0.8821651 0.2252208 0.6979574 0.04336607 0.05758522 0.03391988
## 108   108 0.8807501 0.2266920 0.6972507 0.02477364 0.05633373 0.01752880
## 109   109 0.8967270 0.1972821 0.7115335 0.04144454 0.07464579 0.03338275
## 110   110 0.8997634 0.1937672 0.7126542 0.05561697 0.08774994 0.04500401
## 111   111 0.8702263 0.2459552 0.6895464 0.02101232 0.02965806 0.01454195
## 112   112 0.8903524 0.2096682 0.7050336 0.05009782 0.07909699 0.03932298
## 113   113 0.8705723 0.2454088 0.6898242 0.02093825 0.02914713 0.01467843
## 114   114 0.8879206 0.2134248 0.7041379 0.04595357 0.08760947 0.03516943
## 115   115 0.8823528 0.2250199 0.6983838 0.04267116 0.05652946 0.03395477
## 116   116 0.8811961 0.2268070 0.6988290 0.04208795 0.06926264 0.03484253
## 117   117 0.8787213 0.2300289 0.6963509 0.03640613 0.06784676 0.02691773
## 118   118 0.8823546 0.2250464 0.6983996 0.04270534 0.05650324 0.03388883
## 119   119 0.8938430 0.2051221 0.7067402 0.05531859 0.07782329 0.04467267
## 120   120 0.8942301 0.2024635 0.7080069 0.02530332 0.05844446 0.02338602
## 121   121 0.8869207 0.2161184 0.7029837 0.04173290 0.06596414 0.03176377
## 122   122 0.8853800 0.2188253 0.7025751 0.04014896 0.06543125 0.03290895
## 123   123 0.8711735 0.2446217 0.6900063 0.02090185 0.02947937 0.01443953
## 124   124 0.8712499 0.2445088 0.6900996 0.02077998 0.02925185 0.01437353
## 125   125 0.8852838 0.2190458 0.7016937 0.03776990 0.05970707 0.03009853
## 126   126 0.8769383 0.2339106 0.6952785 0.03400644 0.05449170 0.02441856
## 127   127 0.8807512 0.2282340 0.6970068 0.03579438 0.04360425 0.02844577
## 128   128 0.8713120 0.2444134 0.6901454 0.02080916 0.02913392 0.01424772
## 129   129 0.8785592 0.2315281 0.6960533 0.03228396 0.04256308 0.02558950
## 130   130 0.8881676 0.2144256 0.7032615 0.04519594 0.07175614 0.03689767
## 131   131 0.8841984 0.2210339 0.7013286 0.04080732 0.05991611 0.03150366
## 132   132 0.8781843 0.2321274 0.6958763 0.03240911 0.05195299 0.02532678
## 133   133 0.8885599 0.2129868 0.7051763 0.04132372 0.07063382 0.02833477
## 134   134 0.8885267 0.2122287 0.7053880 0.03226337 0.06533686 0.02445234
## 135   135 0.8783801 0.2318316 0.6958231 0.03231969 0.05160722 0.02528119
## 136   136 0.8831134 0.2225367 0.7015132 0.03547883 0.06295876 0.02622033
## 137   137 0.8909363 0.2093461 0.7060760 0.04074586 0.06451993 0.03254968
## 138   138 0.8876283 0.2139765 0.7046909 0.03572435 0.06937936 0.02734170
## 139   139 0.8712273 0.2446337 0.6901116 0.02057606 0.02813964 0.01408198
## 140   140 0.8773690 0.2335695 0.6937533 0.01656964 0.03589462 0.01041944
## 141   141 0.8843899 0.2216278 0.7004931 0.03969360 0.05325514 0.03005578
## 142   142 0.8770043 0.2337341 0.6943001 0.01860452 0.04024808 0.01432655
## 143   143 0.8772930 0.2338241 0.6940832 0.03018234 0.04318959 0.02255753
## 144   144 0.8779113 0.2329412 0.6954075 0.03034209 0.03850299 0.02377100
## 145   145 0.8849571 0.2202284 0.7018672 0.03787237 0.05658704 0.03101406
## 146   146 0.8856490 0.2193301 0.7018079 0.03854409 0.05585330 0.03124995
## 147   147 0.8779845 0.2328427 0.6956111 0.03062902 0.03886213 0.02420119
## 148   148 0.8828540 0.2232540 0.6982054 0.02741000 0.04932691 0.02185099
## 149   149 0.8830156 0.2228023 0.7015840 0.03624636 0.06408037 0.02680838
## 150   150 0.8710704 0.2449394 0.6901420 0.02074007 0.02833590 0.01390146
## 151   151 0.8771149 0.2340161 0.6938959 0.01689200 0.03650757 0.01031599
## 152   152 0.8710439 0.2449781 0.6901303 0.02067980 0.02829817 0.01393167
## 153   153 0.8763071 0.2354355 0.6946701 0.03265122 0.05076213 0.02250645
## 154   154 0.8773978 0.2336162 0.6941162 0.01706643 0.03648034 0.01049713
## 155   155 0.8772313 0.2339998 0.6942587 0.03074284 0.04392757 0.02283432
## 156   156 0.8711705 0.2448291 0.6900400 0.02100605 0.02843497 0.01419098
## 157   157 0.8917184 0.2080722 0.7071321 0.04165064 0.07155356 0.03057671
## 158   158 0.8830328 0.2230756 0.6984236 0.02808380 0.04978640 0.02230207
## 159   159 0.8712442 0.2447382 0.6902155 0.02112477 0.02861427 0.01423198
## 160   160 0.8712939 0.2446732 0.6902229 0.02120927 0.02878279 0.01423106
## 161   161 0.8792484 0.2307976 0.6965309 0.03450789 0.05443992 0.02671256
## 162   162 0.8773659 0.2338292 0.6943017 0.03097357 0.04430455 0.02307861
## 163   163 0.9053818 0.1843795 0.7157695 0.04226576 0.06810901 0.03296967
## 164   164 0.8711626 0.2448654 0.6901244 0.02130914 0.02893649 0.01441282
## 165   165 0.8711896 0.2448254 0.6902037 0.02129250 0.02892562 0.01437011
## 166   166 0.8825132 0.2244600 0.6988889 0.03934787 0.05964149 0.02842343
## 167   167 0.8773387 0.2335165 0.6963069 0.03185273 0.05600972 0.02466964
## 168   168 0.8764585 0.2352330 0.6947786 0.03317726 0.05143325 0.02292088
## 169   169 0.8853609 0.2197275 0.7017771 0.03873814 0.06633716 0.03003136
## 170   170 0.8969943 0.1986235 0.7090437 0.03186406 0.05499626 0.02569715
## 171   171 0.8772715 0.2337150 0.6955156 0.03015274 0.04738763 0.01993269
## 172   172 0.8711308 0.2449391 0.6900102 0.02132361 0.02944756 0.01405425
## 173   173 0.8788238 0.2315335 0.6962921 0.03415416 0.05411603 0.02651226
## 174   174 0.8710595 0.2450524 0.6900226 0.02119134 0.02941022 0.01397852
## 175   175 0.8819765 0.2248788 0.7004738 0.03911016 0.06766862 0.02912259
## 176   176 0.8775050 0.2336354 0.6944693 0.03142886 0.04531519 0.02316051
## 177   177 0.8836776 0.2220384 0.7016318 0.03675402 0.06855830 0.02830100
## 178   178 0.8767563 0.2344055 0.6960170 0.03083001 0.05466123 0.02427947
## 179   179 0.8771774 0.2336021 0.6944936 0.01934757 0.04230079 0.01447292
## 180   180 0.8842062 0.2216586 0.6999682 0.03684584 0.06052832 0.02760136
## 181   181 0.8710621 0.2450610 0.6899539 0.02113441 0.02930182 0.01393759
## 182   182 0.8773183 0.2336710 0.6953280 0.03031958 0.04795258 0.01976529
## 183   183 0.8764649 0.2353879 0.6945290 0.03370409 0.05230365 0.02323804
## 184   184 0.8778125 0.2330155 0.6943328 0.01787912 0.03922403 0.01080818
## 185   185 0.8709901 0.2451699 0.6898404 0.02102933 0.02910482 0.01377659
## 186   186 0.8710149 0.2451310 0.6898602 0.02103722 0.02913694 0.01376368
## 187   187 0.8943494 0.2046563 0.7086186 0.04439265 0.06659718 0.03631762
## 188   188 0.8831106 0.2231985 0.7003239 0.03739352 0.06212067 0.02933691
## 189   189 0.8777498 0.2330968 0.6958261 0.03019375 0.05269710 0.02250059
## 190   190 0.8764103 0.2354932 0.6945957 0.03374887 0.05233494 0.02345894
## 191   191 0.8766569 0.2346453 0.6959247 0.03072530 0.05432256 0.02417583
## 192   192 0.8709712 0.2452138 0.6899392 0.02109606 0.02913178 0.01401495
## 193   193 0.8709630 0.2452153 0.6899571 0.02107376 0.02906165 0.01398495
## 194   194 0.8771244 0.2337058 0.6943960 0.01979326 0.04295587 0.01510961
## 195   195 0.8900946 0.2110887 0.7059017 0.04184141 0.06344153 0.03364339
## 196   196 0.8833638 0.2231604 0.7005634 0.03965294 0.06707302 0.02886321
## 197   197 0.8778698 0.2329310 0.6959447 0.03054640 0.05311989 0.02303724
## 198   198 0.8709362 0.2452538 0.6899037 0.02114948 0.02905340 0.01407753
## 199   199 0.8778068 0.2330295 0.6958969 0.03060173 0.05316317 0.02307152
## 200   200 0.8709257 0.2452752 0.6899232 0.02111645 0.02900423 0.01403420
## 201   201 0.8709900 0.2451787 0.6899355 0.02110991 0.02901857 0.01405752
## 202   202 0.8708800 0.2453426 0.6898542 0.02108169 0.02900399 0.01402446
## 203   203 0.8822449 0.2247976 0.7005981 0.04014678 0.06838389 0.02993296
## 204   204 0.8777869 0.2330835 0.6958779 0.03058122 0.05314272 0.02289808
## 205   205 0.8777755 0.2333443 0.6953865 0.03143003 0.04019148 0.02448087
## 206   206 0.8708463 0.2454086 0.6898147 0.02109298 0.02900501 0.01405162
## 207   207 0.8708843 0.2453488 0.6898349 0.02108793 0.02894228 0.01407714
## 208   208 0.8778978 0.2331569 0.6954029 0.03166306 0.04052815 0.02463750
## 209   209 0.8708897 0.2453462 0.6898105 0.02106403 0.02890307 0.01404738
## 210   210 0.8708898 0.2453374 0.6898011 0.02101205 0.02885188 0.01403132
## 211   211 0.8708959 0.2453254 0.6898031 0.02100373 0.02885301 0.01400295
## 212   212 0.8910485 0.2095971 0.7077067 0.04195313 0.07037863 0.03366007
## 213   213 0.8709249 0.2452777 0.6898214 0.02100077 0.02874207 0.01398078
## 214   214 0.8857540 0.2190424 0.7016905 0.03297921 0.06133378 0.02777038
## 215   215 0.8709433 0.2452467 0.6898224 0.02095978 0.02868184 0.01394823
## 216   216 0.8781318 0.2325133 0.6947492 0.01838755 0.04030732 0.01149088
## 217   217 0.8769393 0.2342917 0.6960485 0.03115040 0.05481293 0.02475400
## 218   218 0.8709594 0.2452213 0.6898122 0.02092089 0.02859895 0.01389152
## 219   219 0.8709504 0.2452379 0.6898225 0.02095061 0.02860147 0.01392855
## 220   220 0.8709595 0.2452215 0.6898295 0.02091165 0.02857674 0.01389363
## 221   221 0.8776376 0.2334405 0.6949424 0.03176891 0.04563396 0.02485092
## 222   222 0.8782940 0.2325723 0.6957195 0.03250050 0.04171031 0.02551631
## 223   223 0.8709661 0.2452133 0.6898494 0.02091606 0.02863118 0.01390926
## 224   224 0.8709725 0.2451994 0.6898613 0.02087188 0.02860715 0.01387450
## 225   225 0.8777332 0.2330876 0.6955773 0.03123433 0.04921745 0.02042175
## 226   226 0.8799875 0.2299521 0.6965204 0.03487411 0.04123035 0.02772406
## 227   227 0.8774499 0.2332170 0.6945491 0.01993798 0.04365775 0.01523257
## 228   228 0.8774275 0.2332623 0.6944787 0.01993739 0.04362411 0.01513202
## 229   229 0.8782547 0.2323516 0.6949476 0.01852453 0.04066171 0.01170167
## 230   230 0.8709578 0.2452244 0.6898649 0.02087034 0.02869888 0.01386252
## 231   231 0.8709710 0.2452039 0.6898742 0.02086966 0.02867474 0.01386523
## 232   232 0.8866944 0.2181125 0.7021300 0.03206324 0.05905111 0.02592700
## 233   233 0.8783169 0.2322635 0.6950739 0.01863309 0.04085134 0.01191995
## 234   234 0.8932242 0.2060625 0.7067224 0.02772155 0.06215890 0.02414876
## 235   235 0.8709550 0.2452287 0.6898716 0.02086803 0.02864109 0.01384866
## 236   236 0.8777029 0.2333690 0.6950907 0.03192672 0.04586394 0.02516570
## 237   237 0.8776781 0.2334071 0.6950696 0.03186372 0.04576035 0.02510013
## 238   238 0.8990969 0.1956639 0.7116227 0.04271014 0.06616901 0.03173134
## 239   239 0.8979575 0.1976363 0.7110696 0.04328589 0.07333348 0.03488553
## 240   240 0.8709482 0.2452370 0.6898676 0.02086412 0.02861708 0.01385710
##    nvmax
## 13    13
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
## -3.832583e+00 -1.336998e-03  3.502871e-01  1.316519e-02  1.026206e-01 
##           x10           x11           x16           x17           x21 
##  3.532314e-02  6.547159e+06  2.676086e-02  3.808197e-02  3.363089e-03 
##        stat14        stat98       stat110      sqrt.x18 
## -2.807359e-02  1.008003e-01 -9.721343e-02  7.689960e-01

Test

if (algo.stepwise.caret) {
  # Evaluate the stepwise-selection (leapSeq) model on the held-out test
  # set; test.model() prints a summary of predictions and the test MSE.
  # NOTE(review): the guard checks algo.stepwise.caret but the object
  # passed is `model.stepwise` (not `model.stepwise.caret`) -- confirm
  # this is the caret-trained model from the training chunk above.
  test.model(model.stepwise, data.test,
             method = "leapSeq", subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##      Min.   1st Qu.    Median      Mean   3rd Qu.      Max. 
## -1.615548 -0.383649  0.009481 -0.010402  0.359098  1.565338 
## [1] "leapSeq  Test MSE: 0.707385129532587"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

if (algo.LASSO) {
  # Fit LASSO (glmnet with alpha = 1) on the FULL training set over a
  # wide penalty grid, then choose lambda by cross-validation.
  # Note: as.matrix() is used for the design matrix; model.matrix()
  # would also work and additionally expands factors to dummy columns
  # (and interactions similarly).
  x <- as.matrix(data.train[, feature.names])
  y <- data.train[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Penalty grid: 100 log-spaced values from 1e10 down to 1e-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # 10-fold CV over lambda; alpha = 1 keeps the LASSO penalty.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty parameter (you can also make this call visually
  # from the CV plot).
  bestlambda <- cv.out$lambda.min

  # Coefficients at the CV-optimal penalty.
  print(coef(model.LASSO, s = bestlambda))
}

Test

if (algo.LASSO) {
  # Predict on the test set at the CV-selected penalty and report the
  # test error, then plot predicted vs. observed.
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared error on the test set.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # Fix: the quantity is the MSE (no square root is taken), but the
  # original message labelled it "RMSE".
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))

  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

if (algo.LASSO) {
  # Same LASSO fit as above, but trained on the FILTERED training set
  # (data.train2). Test-set matrices are rebuilt so the evaluation chunk
  # below stays self-consistent.
  # Note: as.matrix() is used for the design matrix; model.matrix()
  # would also expand factors to dummy variables and interactions.
  x <- as.matrix(data.train2[, feature.names])
  y <- data.train2[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # 100 log-spaced penalty values, 1e10 down to 1e-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # Cross-validate lambda (alpha = 1 performs LASSO).
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty; can also be chosen visually from the plot.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

if (algo.LASSO) {
  # Evaluate the filtered-train LASSO model on the test set at the
  # CV-selected penalty, then plot predicted vs. observed.
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared error on the test set.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # Fix: the quantity is the MSE (no square root is taken), but the
  # original message labelled it "RMSE".
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))

  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

if (algo.LASSO.caret) {
  # Train LASSO through the caret wrapper (method "glmnet") on the FULL
  # training set; the helper performs the CV tuning and returns the
  # fitted model in $model.
  set.seed(1)
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "glmnet",
                                    subopt = "LASSO",
                                    feature.names = feature.names)
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.0159 on full training set
## glmnet 
## 
## 6002 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE       Rsquared   MAE      
##   0.01000000  0.8583978  0.2637735  0.6825531
##   0.01047616  0.8582192  0.2641419  0.6825038
##   0.01097499  0.8580657  0.2644752  0.6824781
##   0.01149757  0.8579246  0.2647982  0.6824721
##   0.01204504  0.8577994  0.2651045  0.6824723
##   0.01261857  0.8577108  0.2653584  0.6825107
##   0.01321941  0.8576488  0.2655775  0.6825836
##   0.01384886  0.8576068  0.2657755  0.6826746
##   0.01450829  0.8575850  0.2659512  0.6827977
##   0.01519911  0.8575631  0.2661425  0.6829169
##   0.01592283  0.8575402  0.2663528  0.6830369
##   0.01668101  0.8575579  0.2665035  0.6831899
##   0.01747528  0.8576093  0.2666076  0.6833627
##   0.01830738  0.8576962  0.2666599  0.6835745
##   0.01917910  0.8578111  0.2666776  0.6838080
##   0.02009233  0.8579626  0.2666430  0.6840772
##   0.02104904  0.8581621  0.2665386  0.6843781
##   0.02205131  0.8584096  0.2663646  0.6847084
##   0.02310130  0.8586993  0.2661309  0.6850740
##   0.02420128  0.8590191  0.2658628  0.6854769
##   0.02535364  0.8593669  0.2655623  0.6859148
##   0.02656088  0.8597560  0.2652082  0.6863776
##   0.02782559  0.8601932  0.2647904  0.6868710
##   0.02915053  0.8606990  0.2642741  0.6874098
##   0.03053856  0.8612658  0.2636714  0.6879965
##   0.03199267  0.8618784  0.2630136  0.6886242
##   0.03351603  0.8625374  0.2623001  0.6892934
##   0.03511192  0.8632386  0.2615414  0.6900032
##   0.03678380  0.8639860  0.2607308  0.6907494
##   0.03853529  0.8647856  0.2598582  0.6915331
##   0.04037017  0.8656354  0.2589276  0.6923396
##   0.04229243  0.8665411  0.2579301  0.6931906
##   0.04430621  0.8674986  0.2568762  0.6940977
##   0.04641589  0.8684883  0.2558134  0.6950462
##   0.04862602  0.8695178  0.2547296  0.6960412
##   0.05094138  0.8705193  0.2537770  0.6970107
##   0.05336699  0.8715130  0.2529194  0.6979802
##   0.05590810  0.8725407  0.2520854  0.6989797
##   0.05857021  0.8736243  0.2512347  0.7000044
##   0.06135907  0.8747403  0.2504340  0.7010479
##   0.06428073  0.8759140  0.2496367  0.7021530
##   0.06734151  0.8771351  0.2488824  0.7033041
##   0.07054802  0.8784181  0.2481466  0.7044968
##   0.07390722  0.8798220  0.2473018  0.7057755
##   0.07742637  0.8813590  0.2463259  0.7071632
##   0.08111308  0.8830424  0.2451926  0.7086541
##   0.08497534  0.8848858  0.2438716  0.7102533
##   0.08902151  0.8869041  0.2423263  0.7119701
##   0.09326033  0.8891134  0.2405115  0.7138243
##   0.09770100  0.8915313  0.2383714  0.7158424
##   0.10235310  0.8941768  0.2358371  0.7180433
##   0.10722672  0.8970708  0.2328230  0.7204080
##   0.11233240  0.9002356  0.2292226  0.7230007
##   0.11768120  0.9036864  0.2249313  0.7258249
##   0.12328467  0.9074457  0.2198015  0.7288903
##   0.12915497  0.9111514  0.2149714  0.7319129
##   0.13530478  0.9148764  0.2102337  0.7349673
##   0.14174742  0.9188285  0.2048585  0.7381875
##   0.14849683  0.9230498  0.1985894  0.7415687
##   0.15556761  0.9276304  0.1909138  0.7451624
##   0.16297508  0.9325867  0.1815206  0.7489916
##   0.17073526  0.9371949  0.1730267  0.7525021
##   0.17886495  0.9414888  0.1655059  0.7556733
##   0.18738174  0.9453354  0.1598535  0.7584425
##   0.19630407  0.9489015  0.1556189  0.7609409
##   0.20565123  0.9528001  0.1500045  0.7636688
##   0.21544347  0.9570595  0.1424704  0.7666746
##   0.22570197  0.9612202  0.1349120  0.7696029
##   0.23644894  0.9652277  0.1275307  0.7724597
##   0.24770764  0.9686993  0.1237665  0.7749128
##   0.25950242  0.9717890  0.1237002  0.7770609
##   0.27185882  0.9751611  0.1236737  0.7794072
##   0.28480359  0.9788450  0.1236737  0.7819715
##   0.29836472  0.9828723  0.1236737  0.7847479
##   0.31257158  0.9872735  0.1236737  0.7878364
##   0.32745492  0.9920814  0.1236737  0.7912441
##   0.34304693  0.9973103  0.1190183  0.7949429
##   0.35938137  0.9997577        NaN  0.7966682
##   0.37649358  0.9997577        NaN  0.7966682
##   0.39442061  0.9997577        NaN  0.7966682
##   0.41320124  0.9997577        NaN  0.7966682
##   0.43287613  0.9997577        NaN  0.7966682
##   0.45348785  0.9997577        NaN  0.7966682
##   0.47508102  0.9997577        NaN  0.7966682
##   0.49770236  0.9997577        NaN  0.7966682
##   0.52140083  0.9997577        NaN  0.7966682
##   0.54622772  0.9997577        NaN  0.7966682
##   0.57223677  0.9997577        NaN  0.7966682
##   0.59948425  0.9997577        NaN  0.7966682
##   0.62802914  0.9997577        NaN  0.7966682
##   0.65793322  0.9997577        NaN  0.7966682
##   0.68926121  0.9997577        NaN  0.7966682
##   0.72208090  0.9997577        NaN  0.7966682
##   0.75646333  0.9997577        NaN  0.7966682
##   0.79248290  0.9997577        NaN  0.7966682
##   0.83021757  0.9997577        NaN  0.7966682
##   0.86974900  0.9997577        NaN  0.7966682
##   0.91116276  0.9997577        NaN  0.7966682
##   0.95454846  0.9997577        NaN  0.7966682
##   1.00000000  0.9997577        NaN  0.7966682
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01592283.

##    alpha     lambda
## 11     1 0.01592283
## Warning: Removed 23 rows containing missing values (geom_path).
## Warning: Removed 23 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

if (algo.LASSO.caret) {
  # Score the caret-trained LASSO model on the test set; test.model()
  # prints a prediction summary and the test MSE.
  test.model(model.LASSO.caret, data.test,
             method = "glmnet", subopt = "LASSO",
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.51042 -0.35594  0.00811 -0.01164  0.33226  1.44709 
## [1] "glmnet LASSO Test MSE: 0.707600445355111"

LASSO with CV (w/ filtered train)

Train

if (algo.LASSO.caret) {
  # Same caret LASSO training as above, but on the FILTERED training
  # set (data.train2). Overwrites model.LASSO.caret for the test chunk
  # that follows.
  set.seed(1)
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "glmnet",
                                    subopt = "LASSO",
                                    feature.names = feature.names)
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## glmnet 
## 
## 5694 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5123, 5125, 5125, 5124, 5125, 5124, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE       Rsquared   MAE      
##   0.01000000  0.7331725  0.3593495  0.6005220
##   0.01047616  0.7332299  0.3593534  0.6005948
##   0.01097499  0.7333077  0.3593320  0.6006897
##   0.01149757  0.7334213  0.3592577  0.6008326
##   0.01204504  0.7335675  0.3591355  0.6010174
##   0.01261857  0.7337529  0.3589545  0.6012509
##   0.01321941  0.7339686  0.3587326  0.6015073
##   0.01384886  0.7342267  0.3584472  0.6017918
##   0.01450829  0.7345383  0.3580746  0.6021218
##   0.01519911  0.7349070  0.3576103  0.6024887
##   0.01592283  0.7352881  0.3571329  0.6028613
##   0.01668101  0.7357166  0.3565825  0.6032685
##   0.01747528  0.7361447  0.3560473  0.6037046
##   0.01830738  0.7366154  0.3554463  0.6041690
##   0.01917910  0.7370378  0.3549538  0.6045901
##   0.02009233  0.7374814  0.3544414  0.6050250
##   0.02104904  0.7379383  0.3539249  0.6054982
##   0.02205131  0.7384331  0.3533564  0.6060026
##   0.02310130  0.7389680  0.3527358  0.6065567
##   0.02420128  0.7395527  0.3520457  0.6071621
##   0.02535364  0.7401109  0.3514306  0.6077318
##   0.02656088  0.7406970  0.3507922  0.6083127
##   0.02782559  0.7412880  0.3501776  0.6088792
##   0.02915053  0.7419234  0.3495113  0.6094720
##   0.03053856  0.7426080  0.3487871  0.6101109
##   0.03199267  0.7433364  0.3480179  0.6107898
##   0.03351603  0.7440806  0.3472675  0.6114746
##   0.03511192  0.7448710  0.3464766  0.6121982
##   0.03678380  0.7456948  0.3456759  0.6129492
##   0.03853529  0.7465846  0.3448005  0.6137529
##   0.04037017  0.7475402  0.3438558  0.6146132
##   0.04229243  0.7485839  0.3427980  0.6155356
##   0.04430621  0.7497339  0.3415910  0.6165337
##   0.04641589  0.7509989  0.3402154  0.6176022
##   0.04862602  0.7523740  0.3386787  0.6187506
##   0.05094138  0.7538713  0.3369551  0.6199898
##   0.05336699  0.7553583  0.3353291  0.6212325
##   0.05590810  0.7568934  0.3336852  0.6225197
##   0.05857021  0.7583447  0.3323339  0.6237288
##   0.06135907  0.7598343  0.3310131  0.6249542
##   0.06428073  0.7613530  0.3297701  0.6262031
##   0.06734151  0.7629878  0.3284144  0.6275325
##   0.07054802  0.7646986  0.3270479  0.6289181
##   0.07390722  0.7665390  0.3255543  0.6303791
##   0.07742637  0.7684157  0.3241897  0.6318606
##   0.08111308  0.7704094  0.3227645  0.6334358
##   0.08497534  0.7725240  0.3213068  0.6350854
##   0.08902151  0.7748218  0.3196521  0.6368533
##   0.09326033  0.7773353  0.3177130  0.6387751
##   0.09770100  0.7800841  0.3154315  0.6408618
##   0.10235310  0.7830893  0.3127361  0.6431281
##   0.10722672  0.7863737  0.3095379  0.6455925
##   0.11233240  0.7899621  0.3057265  0.6482975
##   0.11768120  0.7938812  0.3011639  0.6512601
##   0.12328467  0.7981248  0.2957898  0.6544636
##   0.12915497  0.8026930  0.2895223  0.6579142
##   0.13530478  0.8070543  0.2841350  0.6612514
##   0.14174742  0.8115931  0.2783637  0.6646882
##   0.14849683  0.8163898  0.2718901  0.6682626
##   0.15556761  0.8215785  0.2640388  0.6721144
##   0.16297508  0.8272353  0.2542502  0.6763373
##   0.17073526  0.8333455  0.2422394  0.6808825
##   0.17886495  0.8390177  0.2314923  0.6850103
##   0.18738174  0.8447708  0.2198929  0.6891591
##   0.19630407  0.8494537  0.2132052  0.6923838
##   0.20565123  0.8540099  0.2074269  0.6954779
##   0.21544347  0.8588275  0.2006097  0.6987692
##   0.22570197  0.8640494  0.1915307  0.7023549
##   0.23644894  0.8695398  0.1801569  0.7061149
##   0.24770764  0.8753392  0.1655654  0.7101099
##   0.25950242  0.8796708  0.1598787  0.7130310
##   0.27185882  0.8836352  0.1579728  0.7157315
##   0.28480359  0.8877298  0.1577218  0.7185706
##   0.29836472  0.8921662  0.1577218  0.7216814
##   0.31257158  0.8970100  0.1577218  0.7250606
##   0.32745492  0.9022965  0.1577218  0.7287217
##   0.34304693  0.9080633  0.1577218  0.7327123
##   0.35938137  0.9139336  0.1445719  0.7367976
##   0.37649358  0.9153735        NaN  0.7377998
##   0.39442061  0.9153735        NaN  0.7377998
##   0.41320124  0.9153735        NaN  0.7377998
##   0.43287613  0.9153735        NaN  0.7377998
##   0.45348785  0.9153735        NaN  0.7377998
##   0.47508102  0.9153735        NaN  0.7377998
##   0.49770236  0.9153735        NaN  0.7377998
##   0.52140083  0.9153735        NaN  0.7377998
##   0.54622772  0.9153735        NaN  0.7377998
##   0.57223677  0.9153735        NaN  0.7377998
##   0.59948425  0.9153735        NaN  0.7377998
##   0.62802914  0.9153735        NaN  0.7377998
##   0.65793322  0.9153735        NaN  0.7377998
##   0.68926121  0.9153735        NaN  0.7377998
##   0.72208090  0.9153735        NaN  0.7377998
##   0.75646333  0.9153735        NaN  0.7377998
##   0.79248290  0.9153735        NaN  0.7377998
##   0.83021757  0.9153735        NaN  0.7377998
##   0.86974900  0.9153735        NaN  0.7377998
##   0.91116276  0.9153735        NaN  0.7377998
##   0.95454846  0.9153735        NaN  0.7377998
##   1.00000000  0.9153735        NaN  0.7377998
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.

##   alpha lambda
## 1     1   0.01
## Warning: Removed 22 rows containing missing values (geom_path).
## Warning: Removed 22 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

if (algo.LASSO.caret) {
  # Evaluate the filtered-train caret LASSO model on the held-out test
  # set (prediction summary + test MSE via test.model()).
  test.model(model.LASSO.caret, data.test,
             method = "glmnet", subopt = "LASSO",
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.76337 -0.45968 -0.04438 -0.06844  0.30340  1.60102 
## [1] "glmnet LASSO Test MSE: 0.710918505671706"

LARS with CV (w/ full train)

Train

if (algo.LARS.caret) {
  # Train Least Angle Regression through the caret wrapper on the FULL
  # training set; the helper performs the CV tuning over `fraction` and
  # returns the fitted model in $model.
  set.seed(1)
  # Fix: subopt was the character string 'NULL' rather than the NULL
  # object. The paired test.model() calls pass subopt = NULL, and the
  # printed test label ("lars  Test MSE") shows an empty subopt, so the
  # string form was almost certainly unintended.
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "lars",
                                    subopt = NULL,
                                    feature.names = feature.names)
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.394 on full training set
## Least Angle Regression 
## 
## 6002 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE       Rsquared   MAE      
##   0.00000000  0.9997577        NaN  0.7966682
##   0.01010101  0.9875243  0.1236737  0.7879771
##   0.02020202  0.9765136  0.1236737  0.7803142
##   0.03030303  0.9667815  0.1238240  0.7735488
##   0.04040404  0.9582779  0.1404488  0.7674966
##   0.05050505  0.9502496  0.1541310  0.7618702
##   0.06060606  0.9429028  0.1627827  0.7567181
##   0.07070707  0.9363359  0.1738892  0.7518754
##   0.08080808  0.9299656  0.1865916  0.7469635
##   0.09090909  0.9238495  0.1973468  0.7421939
##   0.10101010  0.9180614  0.2059358  0.7375603
##   0.11111111  0.9126276  0.2128180  0.7331136
##   0.12121212  0.9075526  0.2196640  0.7289755
##   0.13131313  0.9026844  0.2262642  0.7250094
##   0.14141414  0.8980850  0.2317471  0.7212504
##   0.15151515  0.8937586  0.2362836  0.7176913
##   0.16161616  0.8897092  0.2400233  0.7143168
##   0.17171717  0.8859406  0.2430948  0.7111500
##   0.18181818  0.8824563  0.2456074  0.7081372
##   0.19191919  0.8792599  0.2476534  0.7052713
##   0.20202020  0.8763627  0.2493255  0.7025756
##   0.21212121  0.8737962  0.2510676  0.7001478
##   0.22222222  0.8715859  0.2528262  0.6980518
##   0.23232323  0.8696688  0.2545800  0.6961798
##   0.24242424  0.8678406  0.2565180  0.6944202
##   0.25252525  0.8660831  0.2584592  0.6927507
##   0.26262626  0.8645403  0.2601276  0.6912835
##   0.27272727  0.8631148  0.2616905  0.6898801
##   0.28282828  0.8618307  0.2630866  0.6885658
##   0.29292929  0.8607883  0.2641765  0.6874904
##   0.30303030  0.8599638  0.2649954  0.6865863
##   0.31313131  0.8592946  0.2656298  0.6858032
##   0.32323232  0.8587742  0.2660952  0.6851616
##   0.33333333  0.8583835  0.2664127  0.6846502
##   0.34343434  0.8580865  0.2666087  0.6842575
##   0.35353535  0.8578654  0.2667185  0.6839282
##   0.36363636  0.8577093  0.2667412  0.6836486
##   0.37373737  0.8576031  0.2667115  0.6834201
##   0.38383838  0.8575244  0.2666564  0.6832159
##   0.39393939  0.8575151  0.2664933  0.6830837
##   0.40404040  0.8575360  0.2662914  0.6829887
##   0.41414141  0.8575613  0.2660984  0.6828968
##   0.42424242  0.8575769  0.2659424  0.6827868
##   0.43434343  0.8575861  0.2658114  0.6826788
##   0.44444444  0.8576127  0.2656606  0.6825920
##   0.45454545  0.8576608  0.2654829  0.6825366
##   0.46464646  0.8577169  0.2653014  0.6824883
##   0.47474747  0.8578013  0.2650788  0.6824753
##   0.48484848  0.8579019  0.2648344  0.6824791
##   0.49494949  0.8579995  0.2646022  0.6824744
##   0.50505051  0.8581102  0.2643549  0.6824862
##   0.51515152  0.8582357  0.2640884  0.6825107
##   0.52525253  0.8583665  0.2638174  0.6825472
##   0.53535354  0.8585177  0.2635165  0.6826061
##   0.54545455  0.8586749  0.2632119  0.6826698
##   0.55555556  0.8588487  0.2628840  0.6827409
##   0.56565657  0.8590269  0.2625534  0.6828203
##   0.57575758  0.8592184  0.2622051  0.6829151
##   0.58585859  0.8594256  0.2618352  0.6830220
##   0.59595960  0.8596415  0.2614565  0.6831306
##   0.60606061  0.8598630  0.2610733  0.6832448
##   0.61616162  0.8600844  0.2606957  0.6833602
##   0.62626263  0.8603086  0.2603177  0.6834762
##   0.63636364  0.8605392  0.2599327  0.6835919
##   0.64646465  0.8607776  0.2595385  0.6837162
##   0.65656566  0.8610216  0.2591395  0.6838488
##   0.66666667  0.8612711  0.2587363  0.6839791
##   0.67676768  0.8615256  0.2583298  0.6841136
##   0.68686869  0.8617832  0.2579219  0.6842508
##   0.69696970  0.8620422  0.2575164  0.6843960
##   0.70707071  0.8623020  0.2571143  0.6845402
##   0.71717172  0.8625705  0.2567019  0.6846882
##   0.72727273  0.8628463  0.2562813  0.6848493
##   0.73737374  0.8631256  0.2558586  0.6850111
##   0.74747475  0.8634059  0.2554388  0.6851731
##   0.75757576  0.8636859  0.2550231  0.6853326
##   0.76767677  0.8639656  0.2546116  0.6854916
##   0.77777778  0.8642435  0.2542067  0.6856517
##   0.78787879  0.8645245  0.2537997  0.6858145
##   0.79797980  0.8647989  0.2534059  0.6859712
##   0.80808081  0.8650728  0.2530165  0.6861273
##   0.81818182  0.8653501  0.2526256  0.6862904
##   0.82828283  0.8656304  0.2522339  0.6864600
##   0.83838384  0.8659110  0.2518447  0.6866273
##   0.84848485  0.8661970  0.2514506  0.6868015
##   0.85858586  0.8664907  0.2510482  0.6869845
##   0.86868687  0.8667910  0.2506394  0.6871711
##   0.87878788  0.8670918  0.2502337  0.6873559
##   0.88888889  0.8673971  0.2498238  0.6875437
##   0.89898990  0.8677029  0.2494164  0.6877322
##   0.90909091  0.8680107  0.2490090  0.6879263
##   0.91919192  0.8683266  0.2485922  0.6881262
##   0.92929293  0.8686414  0.2481810  0.6883256
##   0.93939394  0.8689606  0.2477662  0.6885344
##   0.94949495  0.8692861  0.2473455  0.6887517
##   0.95959596  0.8696115  0.2469282  0.6889698
##   0.96969697  0.8699367  0.2465148  0.6891865
##   0.97979798  0.8702677  0.2460950  0.6894048
##   0.98989899  0.8706053  0.2456683  0.6896323
##   1.00000000  0.8709482  0.2452370  0.6898676
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.3939394.

##     fraction
## 40 0.3939394
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

if (algo.LARS.caret) {
  # Score the caret-trained LARS model on the test set; test.model()
  # prints a prediction summary and the test MSE.
  test.model(model.LARS.caret, data.test,
             method = "lars", subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##      Min.   1st Qu.    Median      Mean   3rd Qu.      Max. 
## -1.503371 -0.354374  0.007588 -0.011675  0.331160  1.441923 
## [1] "lars  Test MSE: 0.707749854584791"

LARS with CV (w/ filtered train)

Train

if (algo.LARS.caret) {
  # Same caret LARS training as above, but on the FILTERED training set
  # (data.train2). Overwrites model.LARS.caret for the test chunk below.
  set.seed(1)
  # Fix: subopt was the character string 'NULL' rather than the NULL
  # object; the paired test.model() calls pass subopt = NULL, so the
  # string form was almost certainly unintended.
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "lars",
                                    subopt = NULL,
                                    feature.names = feature.names)
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.576 on full training set
## Least Angle Regression 
## 
## 5694 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5123, 5125, 5125, 5124, 5125, 5124, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE       Rsquared   MAE      
##   0.00000000  0.9153735        NaN  0.7377998
##   0.01010101  0.9006821  0.1577218  0.7275815
##   0.02020202  0.8874568  0.1577218  0.7183732
##   0.03030303  0.8759006  0.1622088  0.7104946
##   0.04040404  0.8652594  0.1893790  0.7031461
##   0.05050505  0.8553702  0.2057376  0.6963500
##   0.06060606  0.8463094  0.2167572  0.6901862
##   0.07070707  0.8380529  0.2323114  0.6842997
##   0.08080808  0.8301022  0.2488924  0.6784791
##   0.09090909  0.8224672  0.2626555  0.6728261
##   0.10101010  0.8152303  0.2735271  0.6674487
##   0.11111111  0.8084328  0.2820912  0.6623372
##   0.12121212  0.8021291  0.2900623  0.6575262
##   0.13131313  0.7960443  0.2984078  0.6529294
##   0.14141414  0.7902927  0.3052923  0.6485917
##   0.15151515  0.7848816  0.3109489  0.6445159
##   0.16161616  0.7798181  0.3155794  0.6407058
##   0.17171717  0.7751091  0.3193554  0.6371169
##   0.18181818  0.7707609  0.3224212  0.6337605
##   0.19191919  0.7668587  0.3252669  0.6306573
##   0.20202020  0.7632711  0.3281513  0.6277667
##   0.21212121  0.7600537  0.3307854  0.6251485
##   0.22222222  0.7572915  0.3332410  0.6228605
##   0.23232323  0.7547146  0.3359146  0.6206923
##   0.24242424  0.7521914  0.3387610  0.6185778
##   0.25252525  0.7498190  0.3413782  0.6165999
##   0.26262626  0.7476622  0.3436339  0.6147179
##   0.27272727  0.7458432  0.3454754  0.6131135
##   0.28282828  0.7442617  0.3470989  0.6117116
##   0.29292929  0.7429255  0.3484445  0.6104800
##   0.30303030  0.7417118  0.3497619  0.6093282
##   0.31313131  0.7407206  0.3508455  0.6083739
##   0.32323232  0.7398993  0.3517279  0.6075589
##   0.33333333  0.7391542  0.3525701  0.6067868
##   0.34343434  0.7384721  0.3533786  0.6060754
##   0.35353535  0.7379024  0.3540349  0.6054818
##   0.36363636  0.7374043  0.3546050  0.6049874
##   0.37373737  0.7369549  0.3551265  0.6045509
##   0.38383838  0.7365454  0.3556071  0.6041385
##   0.39393939  0.7361494  0.3560926  0.6037269
##   0.40404040  0.7357668  0.3565767  0.6033361
##   0.41414141  0.7354102  0.3570317  0.6030009
##   0.42424242  0.7350802  0.3574509  0.6026890
##   0.43434343  0.7347805  0.3578244  0.6023963
##   0.44444444  0.7345064  0.3581607  0.6021204
##   0.45454545  0.7342702  0.3584362  0.6018680
##   0.46464646  0.7340541  0.3586815  0.6016305
##   0.47474747  0.7338732  0.3588735  0.6014213
##   0.48484848  0.7337094  0.3590432  0.6012280
##   0.49494949  0.7335663  0.3591830  0.6010497
##   0.50505051  0.7334451  0.3592894  0.6008950
##   0.51515152  0.7333603  0.3593365  0.6007825
##   0.52525253  0.7332904  0.3593643  0.6006903
##   0.53535354  0.7332246  0.3593909  0.6006167
##   0.54545455  0.7331747  0.3593956  0.6005600
##   0.55555556  0.7331448  0.3593713  0.6005092
##   0.56565657  0.7331287  0.3593284  0.6004594
##   0.57575758  0.7331146  0.3592871  0.6004174
##   0.58585859  0.7331164  0.3592237  0.6003836
##   0.59595960  0.7331342  0.3591373  0.6003570
##   0.60606061  0.7331666  0.3590313  0.6003358
##   0.61616162  0.7332165  0.3589000  0.6003194
##   0.62626263  0.7332893  0.3587344  0.6003226
##   0.63636364  0.7333815  0.3585403  0.6003471
##   0.64646465  0.7335001  0.3583052  0.6003963
##   0.65656566  0.7336417  0.3580355  0.6004619
##   0.66666667  0.7338002  0.3577424  0.6005460
##   0.67676768  0.7339630  0.3574491  0.6006296
##   0.68686869  0.7341327  0.3571490  0.6007165
##   0.69696970  0.7343146  0.3568326  0.6008059
##   0.70707071  0.7345046  0.3565064  0.6009014
##   0.71717172  0.7347016  0.3561720  0.6010058
##   0.72727273  0.7349134  0.3558170  0.6011209
##   0.73737374  0.7351405  0.3554409  0.6012444
##   0.74747475  0.7353857  0.3550391  0.6013843
##   0.75757576  0.7356385  0.3546293  0.6015271
##   0.76767677  0.7358969  0.3542150  0.6016737
##   0.77777778  0.7361559  0.3538050  0.6018144
##   0.78787879  0.7364231  0.3533861  0.6019585
##   0.79797980  0.7367015  0.3529538  0.6021141
##   0.80808081  0.7369903  0.3525096  0.6022790
##   0.81818182  0.7372817  0.3520659  0.6024450
##   0.82828283  0.7375831  0.3516105  0.6026157
##   0.83838384  0.7378935  0.3511451  0.6027956
##   0.84848485  0.7382092  0.3506753  0.6029821
##   0.85858586  0.7385344  0.3501952  0.6031797
##   0.86868687  0.7388747  0.3496946  0.6033876
##   0.87878788  0.7392271  0.3491786  0.6036053
##   0.88888889  0.7395845  0.3486591  0.6038251
##   0.89898990  0.7399407  0.3481457  0.6040447
##   0.90909091  0.7402961  0.3476373  0.6042615
##   0.91919192  0.7406562  0.3471253  0.6044821
##   0.92929293  0.7410235  0.3466057  0.6047155
##   0.93939394  0.7413951  0.3460838  0.6049545
##   0.94949495  0.7417708  0.3455597  0.6051926
##   0.95959596  0.7421498  0.3450344  0.6054385
##   0.96969697  0.7425328  0.3445070  0.6056865
##   0.97979798  0.7429222  0.3439742  0.6059398
##   0.98989899  0.7433154  0.3434400  0.6061995
##   1.00000000  0.7437119  0.3429053  0.6064635
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.5757576.

##     fraction
## 58 0.5757576
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

if (algo.LARS.caret) {
  # Evaluate the filtered-train caret LARS model on the held-out test
  # set (prediction summary + test MSE via test.model()).
  test.model(model.LARS.caret, data.test,
             method = "lars", subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
## -1.77771 -0.45970 -0.04139 -0.06823  0.30320  1.62919 
## [1] "lars  Test MSE: 0.711598924914466"